apollo_parser/lexer/token.rs

use crate::TokenKind;
use std::fmt;

/// A token generated by the lexer.
#[derive(Clone, PartialEq, Eq)]
pub struct Token<'a> {
    pub(crate) kind: TokenKind,
    pub(crate) data: &'a str,
    pub(crate) index: usize,
}

impl<'a> Token<'a> {
    /// Returns the kind of token.
    pub fn kind(&self) -> TokenKind {
        self.kind
    }

    /// Returns the source text for this token.
    pub fn data(&self) -> &'a str {
        self.data
    }

    /// Returns the byte offset of this token in the source text.
    pub fn index(&self) -> usize {
        self.index
    }
}

impl<'a> fmt::Debug for Token<'a> {
    // Debug output renders as `KIND@start:end "data"`; EOF prints only its offset.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let start = self.index;
        let end = self.index + self.data.len();
        match &self.kind {
            TokenKind::Whitespace => {
                write!(f, "WHITESPACE@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::Comment => {
                write!(f, "COMMENT@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::Bang => {
                write!(f, "BANG@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::Dollar => {
                write!(f, "DOLLAR@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::Amp => {
                write!(f, "AMP@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::Spread => {
                write!(f, "SPREAD@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::Colon => {
                write!(f, "COLON@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::Comma => {
                write!(f, "COMMA@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::Eq => {
                write!(f, "EQ@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::At => {
                write!(f, "AT@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::LParen => {
                write!(f, "L_PAREN@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::RParen => {
                write!(f, "R_PAREN@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::LBracket => {
                write!(f, "L_BRACKET@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::RBracket => {
                write!(f, "R_BRACKET@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::LCurly => {
                write!(f, "L_CURLY@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::RCurly => {
                write!(f, "R_CURLY@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::Pipe => {
                write!(f, "PIPE@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::Eof => {
                write!(f, "EOF@{start}:{start}")
            }

            // composite tokens (multi-character lexemes)
            TokenKind::Name => {
                write!(f, "NAME@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::StringValue => {
                write!(f, "STRING_VALUE@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::Int => {
                write!(f, "INT@{}:{} {:?}", start, end, self.data)
            }
            TokenKind::Float => {
                write!(f, "FLOAT@{}:{} {:?}", start, end, self.data)
            }
        }
    }
}
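
// A minimal sketch of how these pieces fit together: the test below builds a
// `Token` directly (possible here because the fields are `pub(crate)`), checks
// the accessors, and verifies the `KIND@start:end "data"` shape produced by the
// `Debug` impl above. It relies only on the `TokenKind::Name` variant already
// referenced in the match arms; the test module itself is illustrative and not
// part of the original file.
#[cfg(test)]
mod tests {
    use super::Token;
    use crate::TokenKind;

    #[test]
    fn debug_output_includes_kind_span_and_data() {
        let token = Token {
            kind: TokenKind::Name,
            data: "foo",
            index: 4,
        };

        // Accessors expose the fields without allowing mutation.
        assert!(matches!(token.kind(), TokenKind::Name));
        assert_eq!(token.data(), "foo");
        assert_eq!(token.index(), 4);

        // `end` is `index + data.len()`, so "foo" starting at byte 4 spans 4..7.
        assert_eq!(format!("{:?}", token), "NAME@4:7 \"foo\"");
    }
}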