//! This crate provides the `color_from_hex!` macro for converting RGB and RGBA hexadecimal string literals
//! to a byte array at compile time.
//!
//! It accepts the following characters in the input string:
//!
//! - `'0'...'9'`, `'a'...'f'`, `'A'...'F'` — hex characters which will be used
//!     in construction of the output byte array
//! - `' '`, `'\r'`, `'\n'`, `'\t'` — formatting characters which will be
//!     ignored
//!
//! A single leading `'#'` character is also accepted and ignored.
//!
//! # Examples
//! ```
//! # use color_hex::color_from_hex;
//!
//! // The macro can be used in const context
//! const COLOR: [u8; 3] = color_from_hex!("010203");
//! # fn main() {
//! assert_eq!(COLOR, [1, 2, 3]);
//!
//! // It understands both upper and lower hex values
//! assert_eq!(color_from_hex!("a1 b2 c3 d4"), [0xA1, 0xB2, 0xC3, 0xD4]);
//! assert_eq!(color_from_hex!("E5 E6 90 92"), [0xE5, 0xE6, 0x90, 0x92]);
//! assert_eq!(color_from_hex!("0a0B0C"), [10, 11, 12]);
//!
//! // It can tolerate a leading '#' or none
//! assert_eq!(color_from_hex!("#4c4c4c"), color_from_hex!("4c4c4c"));
//! # }
//! ```
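//!
//! A sketch of what happens with invalid input: non-hex, non-whitespace
//! characters are rejected at compile time, so the following fails to build.
//!
//! ```compile_fail
//! # use color_hex::color_from_hex;
//! // `g` is not a valid hex character, so the macro panics during expansion.
//! const INVALID: [u8; 3] = color_from_hex!("gg0000");
//! ```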

extern crate proc_macro;

use std::vec::IntoIter;

use proc_macro::{Delimiter, Group, Literal, Punct, Spacing, TokenStream, TokenTree};

/// Strips any outer `Delimiter::None` groups from the input,
/// returning the innermost `TokenStream` that is not itself
/// wrapped in a `Delimiter::None` group.
/// This is used to handle a proc macro being invoked
/// by a `macro_rules!` expansion.
/// See <https://github.com/rust-lang/rust/issues/72545> for background.
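///
/// A sketch of the situation this handles, where `wrap!` is a hypothetical
/// `macro_rules!` caller forwarding a matched fragment to the proc macro:
///
/// ```ignore
/// macro_rules! wrap {
///     // `$color` reaches `color_from_hex!` wrapped in a `Delimiter::None`
///     // group rather than as a bare string literal token.
///     ($color:literal) => {
///         color_from_hex!($color)
///     };
/// }
///
/// const GRAY: [u8; 3] = wrap!("4c4c4c");
/// ```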
fn ignore_groups(mut input: TokenStream) -> TokenStream {
    let mut tokens = input.clone().into_iter();
    loop {
        if let Some(TokenTree::Group(group)) = tokens.next() {
            if group.delimiter() == Delimiter::None {
                input = group.stream();
                // Restart iteration over the unwrapped stream so that nested
                // `Delimiter::None` groups are stripped as well.
                tokens = input.clone().into_iter();
                continue;
            }
        }
        return input;
    }
}

/// Iterator that lexes a hex color string into the `TokenTree`s
/// (`u8` literals separated by commas) making up the output array.
struct TokenTreeIter {
    /// Remaining characters of the input string literal.
    buf: IntoIter<char>,
    /// Whether the next token to emit is a separating comma.
    is_punct: bool,
}

impl TokenTreeIter {
    /// Constructs a new `TokenTreeIter` from a given `proc_macro::Literal`.
    ///
    /// # Panics
    /// This panics if the given `Literal` is not a string literal.
    fn new(input: Literal) -> Self {
        let mut buf: Vec<char> = input.to_string().chars().collect();

        match buf.as_slice() {
            ['"', .., '"'] => (),
            _ => panic!("expected string literal, got `{}`", input),
        };
        // Remove the trailing '"'
        buf.pop();
        // Remove the leading '"'
        buf.remove(0);

        // Check to see if this begins with a # character
        if let Some(&c) = buf.first() {
            // Skip it for parsing
            if c == '#' {
                buf.remove(0);
            }
        }

        Self {
            buf: buf.into_iter(),
            is_punct: false,
        }
    }

    /// Parses a single hex character (`0-9`/`a-f`/`A-F`) from the `TokenTreeIter`'s
    /// internal buffer and returns its numeric value (e.g. `'b'` yields `11`),
    /// skipping any whitespace.
    ///
    /// # Panics
    /// This panics if a non-hex, non-whitespace character is encountered.
    fn next_hex_val(&mut self) -> Option<u8> {
        loop {
            let v = self.buf.next()?;
            let n = match v {
                '0'..='9' => v as u8 - b'0',
                'A'..='F' => v as u8 - b'A' + 10,
                'a'..='f' => v as u8 - b'a' + 10,
                // Whitespace is treated purely as formatting and skipped
                ' ' | '\r' | '\n' | '\t' => continue,
                c if c.is_ascii() => panic!("encountered invalid character: `{}`", c),
                _ => panic!("encountered invalid non-ASCII character"),
            };
            return Some(n);
        }
    }
}

impl Iterator for TokenTreeIter {
    type Item = TokenTree;

    /// Produces hex values (as `u8` literals) parsed from the `TokenTreeIter`'s
    /// internal buffer, alternating with commas to separate the elements of the
    /// generated array of bytes.
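    ///
    /// For example, a buffer holding `"010203"` yields the token sequence
    /// `1u8 , 2u8 , 3u8 ,` (the trailing comma is harmless inside the
    /// surrounding bracket group).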
    ///
    /// The byte array can either be 3 elements long for RGB, or 4 for RGBA.
    ///
    /// # Panics
    /// This panics if the internal buffer contains an odd number of hex characters.
    fn next(&mut self) -> Option<TokenTree> {
        let v = if self.is_punct {
            // Emit a comma to separate the previous byte from the next one
            TokenTree::Punct(Punct::new(',', Spacing::Alone))
        } else {
            let p1 = self.next_hex_val()?;
            let p2 = match self.next_hex_val() {
                Some(v) => v,
                None => panic!("expected even number of hex characters"),
            };
            // Combine the high and low nibbles into a single byte
            let val = (p1 << 4) + p2;
            TokenTree::Literal(Literal::u8_suffixed(val))
        };
        self.is_punct = !self.is_punct;
        Some(v)
    }
}

/// Macro for converting a string literal containing hex-encoded color data
/// into an array of bytes.
///
/// The literal must contain either 3 bytes (RGB) or 4 bytes (RGBA) of hex data;
/// whitespace and a single leading `'#'` are ignored.
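///
/// # Examples
/// A brief usage sketch (the constant name is purely illustrative):
/// ```
/// # use color_hex::color_from_hex;
/// const OPAQUE_TEAL: [u8; 4] = color_from_hex!("#008080ff");
/// assert_eq!(OPAQUE_TEAL, [0x00, 0x80, 0x80, 0xFF]);
/// ```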
#[proc_macro]
pub fn color_from_hex(input: TokenStream) -> TokenStream {
    let mut out_ts = TokenStream::new();

    let mut in_ts = ignore_groups(input).into_iter();

    // Consume only one string literal
    let tt = in_ts.next().expect("macro requires a string literal");
    match tt {
        TokenTree::Literal(literal) => {
            let mut tokens = Vec::new();

            let iter = TokenTreeIter::new(literal);

            // Each parsed byte contributes two tokens: a `u8` literal followed
            // by a comma, so 3 bytes (RGB) yield 6 tokens and 4 bytes (RGBA)
            // yield 8.
            for token in iter {
                tokens.push(token);

                if tokens.len() > 8 {
                    panic!("expected a maximum of 8 characters for RGBA, ex: #4c4c4cff");
                }
            }

            if tokens.len() < 6 {
                panic!(
                    "expected a minimum of 6 characters for RGB, ex: #4c4c4c. Tokens: {:#?}",
                    tokens
                );
            }

            out_ts.extend(tokens.into_iter());
        }
        unexpected => panic!("expected string literal, got `{}`", unexpected),
    };

    // Create the final array by grouping the tokens with brackets
    TokenStream::from(TokenTree::Group(Group::new(Delimiter::Bracket, out_ts)))
}