// pest. The Elegant Parser
// Copyright (c) 2018 Dragoș Tiselice
//
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.

use std::fmt;
use std::rc::Rc;
use std::str;

use super::queueable_token::QueueableToken;
use position;
use token::Token;
use RuleType;

/// An iterator over [`Token`]s. It is created by [`Pair::tokens`] and [`Pairs::tokens`].
///
/// [`Token`]: ../enum.Token.html
/// [`Pair::tokens`]: struct.Pair.html#method.tokens
/// [`Pairs::tokens`]: struct.Pairs.html#method.tokens
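///
/// For example, assuming a `pest_derive`-generated parser named `MyParser` with a rule
/// `Rule::field` (both names are illustrative, not part of this crate):
///
/// ```ignore
/// let pairs = MyParser::parse(Rule::field, "abc").unwrap();
///
/// // Walk the flat stream of start/end tokens underlying the parse tree.
/// for token in pairs.tokens() {
///     println!("{:?}", token);
/// }
/// ```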
#[derive(Clone)]
pub struct Tokens<'i, R> {
    /// # Safety:
    ///
    /// All `QueueableToken`s' `input_pos` must be valid character boundary indices into `input`.
    queue: Rc<Vec<QueueableToken<R>>>,
    input: &'i str,
    start: usize,
    end: usize,
}

// TODO(safety): QueueableTokens must be valid indices into input.
pub fn new<R: RuleType>(
    queue: Rc<Vec<QueueableToken<R>>>,
    input: &str,
    start: usize,
    end: usize,
) -> Tokens<R> {
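    // In debug builds, eagerly check the invariant documented on `Tokens::queue`:
    // every recorded `input_pos` must point at a valid character boundary inside `input`.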
    if cfg!(debug_assertions) {
        for tok in queue.iter() {
            match *tok {
                QueueableToken::Start { input_pos, .. } | QueueableToken::End { input_pos, .. } => {
                    assert!(
                        input.get(input_pos..).is_some(),
                        "UNSAFE `Tokens` CREATED"
                    )
                }
            }
        }
    }

    Tokens {
        queue,
        input,
        start,
        end,
    }
}

impl<'i, R: RuleType> Tokens<'i, R> {
    fn create_token(&self, index: usize) -> Token<'i, R> {
        match self.queue[index] {
            QueueableToken::Start {
                end_token_index,
                input_pos,
            } => {
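                // The rule is stored on the matching `End` token, so follow
                // `end_token_index` to recover it for this `Start` token.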
                let rule = match self.queue[end_token_index] {
                    QueueableToken::End { rule, .. } => rule,
                    _ => unreachable!(),
                };

                Token::Start {
                    rule,
                    // Safety: every `QueueableToken`'s `input_pos` is a valid character
                    // boundary index into `input` (see the invariant on `Tokens::queue`,
                    // debug-checked in `new`).
                    pos: unsafe { position::Position::new_unchecked(self.input, input_pos) },
                }
            }
            QueueableToken::End {
                rule, input_pos, ..
            } => {
                Token::End {
                    rule,
                    // Safety: see the comment on the `Token::Start` arm above.
                    pos: unsafe { position::Position::new_unchecked(self.input, input_pos) },
                }
            }
        }
    }
}

impl<'i, R: RuleType> Iterator for Tokens<'i, R> {
    type Item = Token<'i, R>;

    fn next(&mut self) -> Option<Self::Item> {
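        // `start..end` is a half-open window into `queue`; forward iteration
        // consumes it from the front.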
        if self.start >= self.end {
            return None;
        }

        let token = self.create_token(self.start);

        self.start += 1;

        Some(token)
    }
}

impl<'i, R: RuleType> DoubleEndedIterator for Tokens<'i, R> {
    fn next_back(&mut self) -> Option<Self::Item> {
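        // Consume the same `start..end` window from the back, so forward and
        // reverse iteration never yield the same token twice.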
        if self.end <= self.start {
            return None;
        }

        let token = self.create_token(self.end - 1);

        self.end -= 1;

        Some(token)
    }
}

impl<'i, R: RuleType> fmt::Debug for Tokens<'i, R> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
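        // Format the remaining tokens as a list by walking a clone of this iterator.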
        f.debug_list().entries(self.clone()).finish()
    }
}

#[cfg(test)]
mod tests {
    use super::super::super::macros::tests::*;
    use super::super::super::Parser;
    use super::Token;

    #[test]
    fn double_ended_iter_for_tokens() {
        let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
        let mut tokens = pairs.clone().tokens().collect::<Vec<Token<Rule>>>();
        tokens.reverse();
        let reverse_tokens = pairs.tokens().rev().collect::<Vec<Token<Rule>>>();
        assert_eq!(tokens, reverse_tokens);
    }
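
    #[test]
    fn tokens_start_and_end() {
        // Illustrative sketch: reuses the `AbcParser` fixture from the test above and
        // relies only on the invariant that a successful parse yields a token stream
        // opening with `Token::Start` and closing with `Token::End`.
        let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
        let tokens = pairs.tokens().collect::<Vec<Token<Rule>>>();

        match tokens.first() {
            Some(&Token::Start { .. }) => {}
            other => panic!("expected a leading Token::Start, got {:?}", other),
        }
        match tokens.last() {
            Some(&Token::End { .. }) => {}
            other => panic!("expected a trailing Token::End, got {:?}", other),
        }
    }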
}