//  Copyright (c) 2001-2011 Hartmut Kaiser
//
//  Distributed under the Boost Software License, Version 1.0. (See accompanying
//  file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#include <boost/config/warning_disable.hpp>
#include <boost/detail/lightweight_test.hpp>

#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/lex_lexertl_position_token.hpp>
#include <boost/spirit/include/phoenix_object.hpp>
#include <boost/spirit/include/phoenix_operator.hpp>
#include <boost/spirit/include/phoenix_container.hpp>
#include <boost/foreach.hpp>

#include <cstddef>
#include <iterator>
#include <string>
#include <vector>

namespace lex = boost::spirit::lex;
namespace phoenix = boost::phoenix;
namespace mpl = boost::mpl;

///////////////////////////////////////////////////////////////////////////////
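// Token ids used by both lexer definitions below.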
enum tokenids
{
    ID_INT = 1000,
    ID_DOUBLE
};

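// Lexer without state switching: the token definitions use lex::omit, i.e.
// no token value is attached, and whitespace is matched but not reported
// (pass_ignore).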
template <typename Lexer>
struct token_definitions : lex::lexer<Lexer>
{
    token_definitions()
    {
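        // define named patterns for use in the token definitions below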
        this->self.add_pattern("HEXDIGIT", "[0-9a-fA-F]");
        this->self.add_pattern("OCTALDIGIT", "[0-7]");
        this->self.add_pattern("DIGIT", "[0-9]");

        this->self.add_pattern("OPTSIGN", "[-+]?");
        this->self.add_pattern("EXPSTART", "[eE][-+]");
        this->self.add_pattern("EXPONENT", "[eE]{OPTSIGN}{DIGIT}+");

        // define tokens and associate them with the lexer
        int_ = "(0x|0X){HEXDIGIT}+|0{OCTALDIGIT}*|{OPTSIGN}[1-9]{DIGIT}*";
        int_.id(ID_INT);

        double_ = "{OPTSIGN}({DIGIT}*\\.{DIGIT}+|{DIGIT}+\\.){EXPONENT}?|{DIGIT}+{EXPONENT}";
        double_.id(ID_DOUBLE);

        whitespace = "[ \t\n]+";

        this->self =
                double_
            |   int_
            |   whitespace[ lex::_pass = lex::pass_flags::pass_ignore ]
            ;
    }

    lex::token_def<lex::omit> int_;
    lex::token_def<lex::omit> double_;
    lex::token_def<lex::omit> whitespace;
};

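// Same token definitions, but associated with all lexer states ("*"): matching
// a double switches the lexer into the "DOUBLE" state, matching an integer
// into the "INT" state, and each token records the state it was matched in.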
template <typename Lexer>
struct token_definitions_with_state : lex::lexer<Lexer>
{
    token_definitions_with_state()
    {
        this->self.add_pattern("HEXDIGIT", "[0-9a-fA-F]");
        this->self.add_pattern("OCTALDIGIT", "[0-7]");
        this->self.add_pattern("DIGIT", "[0-9]");

        this->self.add_pattern("OPTSIGN", "[-+]?");
        this->self.add_pattern("EXPSTART", "[eE][-+]");
        this->self.add_pattern("EXPONENT", "[eE]{OPTSIGN}{DIGIT}+");

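        // register the lexer states used by the token definitions below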
        this->self.add_state();
        this->self.add_state("INT");
        this->self.add_state("DOUBLE");

        // define tokens and associate them with the lexer
        int_ = "(0x|0X){HEXDIGIT}+|0{OCTALDIGIT}*|{OPTSIGN}[1-9]{DIGIT}*";
        int_.id(ID_INT);

        double_ = "{OPTSIGN}({DIGIT}*\\.{DIGIT}+|{DIGIT}+\\.){EXPONENT}?|{DIGIT}+{EXPONENT}";
        double_.id(ID_DOUBLE);

        whitespace = "[ \t\n]+";

        this->self("*") =
                double_    [ lex::_state = "DOUBLE" ]
            |   int_       [ lex::_state = "INT" ]
            |   whitespace [ lex::_pass = lex::pass_flags::pass_ignore ]
            ;
    }

    lex::token_def<lex::omit> int_;
    lex::token_def<lex::omit> double_;
    lex::token_def<lex::omit> whitespace;
};

///////////////////////////////////////////////////////////////////////////////
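// Compares the ids of the generated tokens against the expected ids; the
// expected array is terminated by -1.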
template <typename Token>
inline bool
test_token_ids(int const* ids, std::vector<Token> const& tokens)
{
    BOOST_FOREACH(Token const& t, tokens)
    {
        if (*ids == -1)
            return false;           // reached end of expected data

        if (t.id() != static_cast<std::size_t>(*ids))   // token id must match
            return false;
        ++ids;
    }

    return (*ids == -1) ? true : false;
}

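// Compares the lexer states of the generated tokens against the expected
// states; the expected array is terminated by std::size_t(-1).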
template <typename Token>
inline bool
test_token_states(std::size_t const* states, std::vector<Token> const& tokens)
{
    BOOST_FOREACH(Token const& t, tokens)
    {
        if (*states == std::size_t(-1))
            return false;           // reached end of expected data

        if (t.state() != *states)   // token state must match
            return false;
        ++states;
    }

    return (*states == std::size_t(-1)) ? true : false;
}

///////////////////////////////////////////////////////////////////////////////
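// Compares the matched character positions of the generated tokens against
// the expected [begin, end) offsets into the input; the expected array is
// terminated by { std::size_t(-1), std::size_t(-1) }.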
struct position_type
{
    std::size_t begin, end;
};

template <typename Iterator, typename Token>
inline bool
test_token_positions(Iterator begin, position_type const* positions,
    std::vector<Token> const& tokens)
{
    BOOST_FOREACH(Token const& t, tokens)
    {
        if (positions->begin == std::size_t(-1) &&
            positions->end == std::size_t(-1))
        {
            return false;           // reached end of expected data
        }

        boost::iterator_range<Iterator> matched = t.matched();
        std::size_t start = std::distance(begin, matched.begin());
        std::size_t end = std::distance(begin, matched.end());

        // position must match
        if (start != positions->begin || end != positions->end)
            return false;

        ++positions;
    }

    return (positions->begin == std::size_t(-1) &&
            positions->end == std::size_t(-1)) ? true : false;
}

///////////////////////////////////////////////////////////////////////////////
int main()
{
    typedef std::string::iterator base_iterator_type;
    std::string input(" 01 1.2 -2 0x3 2.3e6 -3.4");
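    // expected token ids, lexer states, and [begin, end) character positions
    // for the input above; each array is terminated by -1 / std::size_t(-1)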
    int ids[] = { ID_INT, ID_DOUBLE, ID_INT, ID_INT, ID_DOUBLE, ID_DOUBLE, -1 };
    std::size_t states[] = { 0, 1, 2, 1, 1, 2, std::size_t(-1) };
    position_type positions[] =
    {
        { 1, 3 }, { 4, 7 }, { 8, 10 }, { 11, 14 }, { 15, 20 }, { 21, 25 },
        { std::size_t(-1), std::size_t(-1) }
    };

    // token type: token id, iterator_pair as token value, no state
    {
        typedef lex::lexertl::token<
            base_iterator_type, mpl::vector<>, mpl::false_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
    }

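    // position_token type: token id, matched iterator range as position, no state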
    {
        typedef lex::lexertl::position_token<
            base_iterator_type, mpl::vector<>, mpl::false_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_positions(input.begin(), positions, tokens));
    }

    // token type: holds token id, state, iterator_pair as token value
    {
        typedef lex::lexertl::token<
            base_iterator_type, mpl::vector<>, mpl::true_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions_with_state<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_states(states, tokens));
    }

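    // position_token type: token id, state, matched iterator range as position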
    {
        typedef lex::lexertl::position_token<
            base_iterator_type, mpl::vector<>, mpl::true_> token_type;
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        token_definitions_with_state<lexer_type> lexer;
        std::vector<token_type> tokens;
        base_iterator_type first = input.begin();

        using phoenix::arg_names::_1;
        BOOST_TEST(lex::tokenize(first, input.end(), lexer
          , phoenix::push_back(phoenix::ref(tokens), _1)));

        BOOST_TEST(test_token_ids(ids, tokens));
        BOOST_TEST(test_token_states(states, tokens));
        BOOST_TEST(test_token_positions(input.begin(), positions, tokens));
    }

    return boost::report_errors();
}