fuel_pest/iterators/
tokens.rs

1// pest. The Elegant Parser
2// Copyright (c) 2018 Dragoș Tiselice
3//
4// Licensed under the Apache License, Version 2.0
5// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
6// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
7// option. All files in the project carrying such notice may not be copied,
8// modified, or distributed except according to those terms.
9
10use alloc::rc::Rc;
11use alloc::vec::Vec;
12use std::fmt;
13use std::str;
14use std::sync::Arc;
15
16use super::queueable_token::QueueableToken;
17use position;
18use token::Token;
19use RuleType;
20
21/// An iterator over [`Token`]s. It is created by [`Pair::tokens`] and [`Pairs::tokens`].
22///
23/// [`Token`]: ../enum.Token.html
24/// [`Pair::tokens`]: struct.Pair.html#method.tokens
25/// [`Pairs::tokens`]: struct.Pairs.html#method.tokens
#[derive(Clone)]
pub struct Tokens<R> {
    /// Shared queue of raw tokens produced by the parser; `start..end` below
    /// selects the window of this queue that this iterator yields.
    ///
    /// # Safety:
    ///
    /// All `QueueableToken`s' `input_pos` must be valid character boundary indices into `input`.
    /// This invariant is what allows `create_token` to call
    /// `Position::new_unchecked` without re-validating each index.
    queue: Rc<Vec<QueueableToken<R>>>,
    /// The input string the token positions point into; shared cheaply via `Arc`.
    input: Arc<str>,
    /// Index (inclusive) into `queue` of the next token yielded from the front.
    start: usize,
    /// Index (exclusive) into `queue`; tokens are yielded from the back at `end - 1`.
    end: usize,
}
36
37// TODO(safety): QueueableTokens must be valid indices into input.
38pub fn new<R: RuleType>(
39    queue: Rc<Vec<QueueableToken<R>>>,
40    input: Arc<str>,
41    start: usize,
42    end: usize,
43) -> Tokens<R> {
44    if cfg!(debug_assertions) {
45        for tok in queue.iter() {
46            match *tok {
47                QueueableToken::Start { input_pos, .. } | QueueableToken::End { input_pos, .. } => {
48                    assert!(
49                        input.get(input_pos..).is_some(),
50                        "💥 UNSAFE `Tokens` CREATED 💥"
51                    )
52                }
53            }
54        }
55    }
56
57    Tokens {
58        queue,
59        input,
60        start,
61        end,
62    }
63}
64
65impl<R: RuleType> Tokens<R> {
66    fn create_token(&self, index: usize) -> Token<R> {
67        match self.queue[index] {
68            QueueableToken::Start {
69                end_token_index,
70                input_pos,
71            } => {
72                let rule = match self.queue[end_token_index] {
73                    QueueableToken::End { rule, .. } => rule,
74                    _ => unreachable!(),
75                };
76
77                Token::Start {
78                    rule,
79                    // QueueableTokens are safely created.
80                    pos: unsafe {
81                        position::Position::new_unchecked(self.input.clone(), input_pos)
82                    },
83                }
84            }
85            QueueableToken::End {
86                rule, input_pos, ..
87            } => {
88                Token::End {
89                    rule,
90                    // QueueableTokens are safely created.
91                    pos: unsafe {
92                        position::Position::new_unchecked(self.input.clone(), input_pos)
93                    },
94                }
95            }
96        }
97    }
98}
99
100impl<R: RuleType> Iterator for Tokens<R> {
101    type Item = Token<R>;
102
103    fn next(&mut self) -> Option<Self::Item> {
104        if self.start >= self.end {
105            return None;
106        }
107
108        let token = self.create_token(self.start);
109
110        self.start += 1;
111
112        Some(token)
113    }
114}
115
116impl<R: RuleType> DoubleEndedIterator for Tokens<R> {
117    fn next_back(&mut self) -> Option<Self::Item> {
118        if self.end <= self.start {
119            return None;
120        }
121
122        let token = self.create_token(self.end - 1);
123
124        self.end -= 1;
125
126        Some(token)
127    }
128}
129
130impl<R: RuleType> fmt::Debug for Tokens<R> {
131    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
132        f.debug_list().entries(self.clone()).finish()
133    }
134}
135
#[cfg(test)]
mod tests {
    use super::super::super::macros::tests::*;
    use super::super::super::Parser;
    use super::Token;
    use alloc::vec::Vec;
    use std::sync::Arc;

    /// Iterating backwards must produce exactly the forward sequence reversed.
    #[test]
    fn double_ended_iter_for_tokens() {
        let pairs = AbcParser::parse(Rule::a, Arc::from("abcde")).unwrap();
        let forward: Vec<Token<Rule>> = pairs.clone().tokens().collect();
        let backward: Vec<Token<Rule>> = pairs.tokens().rev().collect();
        let mut expected = forward;
        expected.reverse();
        assert_eq!(expected, backward);
    }
}
152}