// Copyright (c) 2001-2011 Hartmut Kaiser
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#include <boost/config/warning_disable.hpp>
#include <boost/detail/lightweight_test.hpp>

#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/lex_lexertl_position_token.hpp>
#include <boost/spirit/include/phoenix_object.hpp>
#include <boost/spirit/include/phoenix_operator.hpp>
#include <boost/spirit/include/phoenix_container.hpp>
#include <boost/spirit/include/qi_numeric.hpp>

#include <boost/foreach.hpp>
#include <boost/optional.hpp>

#include <iterator>
#include <string>
#include <vector>

namespace spirit = boost::spirit;
namespace lex = boost::spirit::lex;
namespace phoenix = boost::phoenix;
namespace mpl = boost::mpl;

///////////////////////////////////////////////////////////////////////////////
enum tokenids
{
    ID_INT = 1000,
    ID_DOUBLE
};

template <typename Lexer>
struct token_definitions : lex::lexer<Lexer>
{
    token_definitions()
    {
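        // add_pattern() defines named sub-patterns ("macros") that the token
        // definitions below reference as {NAME} inside their regexes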
        this->self.add_pattern("HEXDIGIT", "[0-9a-fA-F]");
        this->self.add_pattern("OCTALDIGIT", "[0-7]");
        this->self.add_pattern("DIGIT", "[0-9]");

        this->self.add_pattern("OPTSIGN", "[-+]?");
        this->self.add_pattern("EXPSTART", "[eE][-+]");
        this->self.add_pattern("EXPONENT", "[eE]{OPTSIGN}{DIGIT}+");

        // define tokens and associate them with the lexer
        int_ = "(0x|0X){HEXDIGIT}+|0{OCTALDIGIT}*|{OPTSIGN}[1-9]{DIGIT}*";
        int_.id(ID_INT);

        double_ = "{OPTSIGN}({DIGIT}*\\.{DIGIT}+|{DIGIT}+\\.){EXPONENT}?|{DIGIT}+{EXPONENT}";
        double_.id(ID_DOUBLE);

        whitespace = "[ \t\n]+";

        this->self =
              double_
            | int_
            | whitespace[ lex::_pass = lex::pass_flags::pass_ignore ]
            ;
    }

    lex::token_def<int> int_;
    lex::token_def<double> double_;
    lex::token_def<lex::omit> whitespace;
};

template <typename Lexer>
struct token_definitions_with_state : lex::lexer<Lexer>
{
    token_definitions_with_state()
    {
        this->self.add_pattern("HEXDIGIT", "[0-9a-fA-F]");
        this->self.add_pattern("OCTALDIGIT", "[0-7]");
        this->self.add_pattern("DIGIT", "[0-9]");

        this->self.add_pattern("OPTSIGN", "[-+]?");
        this->self.add_pattern("EXPSTART", "[eE][-+]");
        this->self.add_pattern("EXPONENT", "[eE]{OPTSIGN}{DIGIT}+");

        this->self.add_state();
        this->self.add_state("INT");
        this->self.add_state("DOUBLE");
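        // the tests in main() expect state "INITIAL" to have id 0, "INT" id 1
        // and "DOUBLE" id 2 (compare the states[] array)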

        // define tokens and associate them with the lexer
        int_ = "(0x|0X){HEXDIGIT}+|0{OCTALDIGIT}*|{OPTSIGN}[1-9]{DIGIT}*";
        int_.id(ID_INT);

        double_ = "{OPTSIGN}({DIGIT}*\\.{DIGIT}+|{DIGIT}+\\.){EXPONENT}?|{DIGIT}+{EXPONENT}";
        double_.id(ID_DOUBLE);

        whitespace = "[ \t\n]+";

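        // "*" associates these definitions with every lexer state; the
        // lex::_state actions switch the state used for the next match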
        this->self("*") =
              double_ [ lex::_state = "DOUBLE" ]
            | int_ [ lex::_state = "INT" ]
            | whitespace[ lex::_pass = lex::pass_flags::pass_ignore ]
            ;
    }

    lex::token_def<int> int_;
    lex::token_def<double> double_;
    lex::token_def<lex::omit> whitespace;
};

///////////////////////////////////////////////////////////////////////////////
template <typename Token>
inline bool
test_token_ids(int const* ids, std::vector<Token> const& tokens)
{
    BOOST_FOREACH(Token const& t, tokens)
    {
        if (*ids == -1)
            return false;           // reached end of expected data

        if (t.id() != static_cast<std::size_t>(*ids))   // token id must match
            return false;

        ++ids;
    }

    return (*ids == -1) ? true : false;
}

///////////////////////////////////////////////////////////////////////////////
template <typename Token>
inline bool
test_token_states(std::size_t const* states, std::vector<Token> const& tokens)
{
    BOOST_FOREACH(Token const& t, tokens)
    {
        if (*states == std::size_t(-1))
            return false;           // reached end of expected data

        if (t.state() != *states)   // token state must match
            return false;

        ++states;
    }

    return (*states == std::size_t(-1)) ? true : false;
}

///////////////////////////////////////////////////////////////////////////////
struct position_type
{
    std::size_t begin, end;
};

template <typename Iterator, typename Token>
inline bool
test_token_positions(Iterator begin, position_type const* positions,
    std::vector<Token> const& tokens)
{
    BOOST_FOREACH(Token const& t, tokens)
    {
        if (positions->begin == std::size_t(-1) &&
            positions->end == std::size_t(-1))
        {
            return false;           // reached end of expected data
        }

        boost::iterator_range<Iterator> matched = t.matched();
        std::size_t start = std::distance(begin, matched.begin());
        std::size_t end = std::distance(begin, matched.end());

        // position must match
        if (start != positions->begin || end != positions->end)
            return false;

        ++positions;
    }

    return (positions->begin == std::size_t(-1) &&
            positions->end == std::size_t(-1)) ? true : false;
}

///////////////////////////////////////////////////////////////////////////////
template <typename T, typename Token>
inline bool
test_token_values(boost::optional<T> const* values, std::vector<Token> const& tokens)
{
    BOOST_FOREACH(Token const& t, tokens)
    {
        if (values->is_initialized() && values->get() == 0)
            return false;           // reached end of expected data

        if (values->is_initialized()) {
            T val;
            spirit::traits::assign_to(t, val);
            if (val != values->get())   // token value must match
                return false;
        }

        ++values;
    }

    return (values->is_initialized() && values->get() == 0) ? true : false;
}

///////////////////////////////////////////////////////////////////////////////
int main()
{
    using boost::none;
    typedef std::string::iterator base_iterator_type;

    // note: two spaces between "03" and "2.3e6", matching the expected
    // token positions below
    std::string input(" 01 1.2 -2 03  2.3e6 -3.4");
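    // six tokens are expected (whitespace is skipped via pass_ignore):
    // 01, -2 and 03 match int_; 1.2, 2.3e6 and -3.4 match double_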
    int ids[] = { ID_INT, ID_DOUBLE, ID_INT, ID_INT, ID_DOUBLE, ID_DOUBLE, -1 };
    std::size_t states[] = { 0, 1, 2, 1, 1, 2, std::size_t(-1) };
    position_type positions[] =
    {
        { 1, 3 }, { 4, 7 }, { 8, 10 }, { 11, 13 }, { 15, 20 }, { 21, 25 },
        { std::size_t(-1), std::size_t(-1) }
    };
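    // expected token values: 'none' marks tokens of the other type (their
    // value is not checked); a trailing 0/0.0 terminates each array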
    boost::optional<int> ivalues[] = {
        1, none, -2,
        3, none, none,
        0
    };
    boost::optional<double> dvalues[] = {
        none, 1.2, none,
        none, 2.3e6, -3.4,
        0.0
    };

    // token type: token id, iterator_pair as token value, no state
    {
        typedef lex::lexertl::token<
            base_iterator_type, mpl::vector<double, int>, mpl::false_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_values(ivalues, tokens));
        BOOST_TEST(test_token_values(dvalues, tokens));
    }

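    // token type: token id, token value, and position (iterator_pair), no state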
    {
        typedef lex::lexertl::position_token<
            base_iterator_type, mpl::vector<double, int>, mpl::false_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_positions(input.begin(), positions, tokens));
        BOOST_TEST(test_token_values(ivalues, tokens));
        BOOST_TEST(test_token_values(dvalues, tokens));
    }

    // token type: holds token id, state, iterator_pair as token value
    {
        typedef lex::lexertl::token<
            base_iterator_type, mpl::vector<double, int>, mpl::true_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions_with_state<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_states(states, tokens));
        BOOST_TEST(test_token_values(ivalues, tokens));
        BOOST_TEST(test_token_values(dvalues, tokens));
    }

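    // token type: holds token id, state, token value, and position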
    {
        typedef lex::lexertl::position_token<
            base_iterator_type, mpl::vector<double, int>, mpl::true_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions_with_state<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_states(states, tokens));
        BOOST_TEST(test_token_positions(input.begin(), positions, tokens));
        BOOST_TEST(test_token_values(ivalues, tokens));
        BOOST_TEST(test_token_values(dvalues, tokens));
    }

    return boost::report_errors();
}