// pest. The Elegant Parser
// Copyright (c) 2018 Dragoș Tiselice
//
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.

use alloc::rc::Rc;
use alloc::vec::Vec;
use core::fmt;
use core::str;

use super::queueable_token::QueueableToken;
use crate::position;
use crate::token::Token;
use crate::RuleType;

/// An iterator over [`Token`]s. It is created by [`Pair::tokens`] and [`Pairs::tokens`].
///
/// [`Token`]: ../enum.Token.html
/// [`Pair::tokens`]: struct.Pair.html#method.tokens
/// [`Pairs::tokens`]: struct.Pairs.html#method.tokens
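///
/// # Examples
///
/// A rough usage sketch; `MyParser` and `Rule::a` stand in for a grammar
/// generated with `pest_derive` and are not defined in this crate:
///
/// ```ignore
/// let pairs = MyParser::parse(Rule::a, "input").unwrap();
/// for token in pairs.tokens() {
///     match token {
///         Token::Start { rule, pos } => println!("{:?} starts at {:?}", rule, pos),
///         Token::End { rule, pos } => println!("{:?} ends at {:?}", rule, pos),
///     }
/// }
/// ```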
#[derive(Clone)]
pub struct Tokens<'i, R> {
    queue: Rc<Vec<QueueableToken<'i, R>>>,
    input: &'i str,
    start: usize,
    end: usize,
}

pub fn new<'i, R: RuleType>(
    queue: Rc<Vec<QueueableToken<'i, R>>>,
    input: &'i str,
    start: usize,
    end: usize,
) -> Tokens<'i, R> {
    if cfg!(debug_assertions) {
        // In debug builds, verify that every queued token points at a valid
        // position inside `input` before handing out an iterator over it.
        for tok in queue.iter() {
            match *tok {
                QueueableToken::Start { input_pos, .. } | QueueableToken::End { input_pos, .. } => {
                    assert!(
                        input.get(input_pos..).is_some(),
                        "INVALID `Tokens` CREATED"
                    )
                }
            }
        }
    }

    Tokens {
        queue,
        input,
        start,
        end,
    }
}

impl<'i, R: RuleType> Tokens<'i, R> {
    fn create_token(&self, index: usize) -> Token<'i, R> {
        match self.queue[index] {
            QueueableToken::Start {
                end_token_index,
                input_pos,
            } => {
                // The rule is stored on the matching `End` entry, which the
                // `Start` entry points to via `end_token_index`.
                let rule = match self.queue[end_token_index] {
                    QueueableToken::End { rule, .. } => rule,
                    _ => unreachable!(),
                };

                Token::Start {
                    rule,
                    pos: position::Position::new_internal(self.input, input_pos),
                }
            }
            QueueableToken::End {
                rule, input_pos, ..
            } => Token::End {
                rule,
                pos: position::Position::new_internal(self.input, input_pos),
            },
        }
    }
}

impl<'i, R: RuleType> ExactSizeIterator for Tokens<'i, R> {
    fn len(&self) -> usize {
        self.end - self.start
    }
}

impl<'i, R: RuleType> Iterator for Tokens<'i, R> {
    type Item = Token<'i, R>;

    fn next(&mut self) -> Option<Self::Item> {
        if self.start >= self.end {
            return None;
        }

        let token = self.create_token(self.start);

        self.start += 1;

        Some(token)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = <Self as ExactSizeIterator>::len(self);
        (len, Some(len))
    }
}

impl<'i, R: RuleType> DoubleEndedIterator for Tokens<'i, R> {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.end <= self.start {
            return None;
        }

        let token = self.create_token(self.end - 1);

        self.end -= 1;

        Some(token)
    }
}

impl<'i, R: RuleType> fmt::Debug for Tokens<'i, R> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.clone()).finish()
    }
}

#[cfg(test)]
mod tests {
    use super::super::super::macros::tests::*;
    use super::super::super::Parser;
    use super::Token;
    use alloc::vec::Vec;

    #[test]
    fn double_ended_iter_for_tokens() {
        let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
        let mut tokens = pairs.clone().tokens().collect::<Vec<Token<'_, Rule>>>();
        tokens.reverse();
        let reverse_tokens = pairs.tokens().rev().collect::<Vec<Token<'_, Rule>>>();
        assert_eq!(tokens, reverse_tokens);
    }

    #[test]
    fn exact_size_iter_for_tokens() {
        let tokens = AbcParser::parse(Rule::a, "abcde").unwrap().tokens();
        assert_eq!(tokens.len(), tokens.count());

        let tokens = AbcParser::parse(Rule::a, "我很漂亮e").unwrap().tokens();
        assert_eq!(tokens.len(), tokens.count());

        let tokens = AbcParser::parse(Rule::a, "abcde").unwrap().tokens().rev();
        assert_eq!(tokens.len(), tokens.count());

        let mut tokens = AbcParser::parse(Rule::a, "abcde").unwrap().tokens();
        let tokens_len = tokens.len();
        let _ = tokens.next().unwrap();
        assert_eq!(tokens.count() + 1, tokens_len);
    }
}