// ethers_core/abi/tokens.rs

1//! Contract Functions Output types.
2//!
3//! Adapted from [rust-web3](https://github.com/tomusdrw/rust-web3/blob/master/src/contract/tokens.rs).
4
5use crate::{
6    abi::Token,
7    types::{Address, Bytes, H256, I256, U128, U256},
8};
9use arrayvec::ArrayVec;
10use thiserror::Error;
11
/// Error returned when a [`Token`] cannot be converted into the expected
/// output type during detokenization. Carries a human-readable description.
#[derive(Clone, Debug, Error)]
#[error("{0}")]
pub struct InvalidOutputType(pub String);
15
/// Output type possible to deserialize from Contract ABI
pub trait Detokenize {
    /// Creates a new instance from parsed ABI tokens.
    ///
    /// # Errors
    ///
    /// Returns [`InvalidOutputType`] when the tokens do not match `Self`.
    fn from_tokens(tokens: Vec<Token>) -> Result<Self, InvalidOutputType>
    where
        Self: Sized;
}
23
24impl Detokenize for () {
25    fn from_tokens(_: Vec<Token>) -> std::result::Result<Self, InvalidOutputType> {
26        Ok(())
27    }
28}
29
30impl<T: Tokenizable> Detokenize for T {
31    fn from_tokens(mut tokens: Vec<Token>) -> Result<Self, InvalidOutputType> {
32        let token = if tokens.len() == 1 { tokens.pop().unwrap() } else { Token::Tuple(tokens) };
33        Self::from_token(token)
34    }
35}
36
/// Convert types into [`Token`]s.
pub trait Tokenize {
    /// Converts `self` into a `Vec<Token>`.
    fn into_tokens(self) -> Vec<Token>;
}
42
43impl<'a> Tokenize for &'a [Token] {
44    fn into_tokens(self) -> Vec<Token> {
45        let mut tokens = self.to_vec();
46        if tokens.len() == 1 {
47            flatten_token(tokens.pop().unwrap())
48        } else {
49            tokens
50        }
51    }
52}
53
54impl<T: Tokenizable> Tokenize for T {
55    fn into_tokens(self) -> Vec<Token> {
56        flatten_token(self.into_token())
57    }
58}
59
60impl Tokenize for () {
61    fn into_tokens(self) -> Vec<Token> {
62        vec![]
63    }
64}
65
66/// Simplified output type for single value.
67pub trait Tokenizable {
68    /// Converts a `Token` into expected type.
69    fn from_token(token: Token) -> Result<Self, InvalidOutputType>
70    where
71        Self: Sized;
72
73    /// Converts a specified type back into token.
74    fn into_token(self) -> Token;
75}
76
/// Generates a [`Tokenizable`] implementation for the tuple type of arity
/// `$num`, converting to and from [`Token::Tuple`]. `$ty` names the element
/// types and `$no` the corresponding tuple-field indices.
macro_rules! impl_tuples {
    ($num:expr, $( $ty:ident : $no:tt ),+ $(,)?) => {
        impl<$( $ty ),+> Tokenizable for ($( $ty, )+)
        where
            $(
                $ty: Tokenizable,
            )+
        {
            fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
                match token {
                    // Only a tuple of exactly the right arity is accepted.
                    Token::Tuple(tokens) if tokens.len() == $num => {
                        let mut it = tokens.into_iter();
                        // SAFETY: length checked above
                        unsafe {
                            Ok(($(
                                <$ty as Tokenizable>::from_token(it.next().unwrap_unchecked())?,
                            )+))
                        }
                    },
                    other => Err(InvalidOutputType(format!(
                        concat!(
                            "Expected `Tuple` of length ",
                            stringify!($num),
                            ", got {:?}",
                        ),
                        other,
                    ))),
                }
            }

            fn into_token(self) -> Token {
                Token::Tuple(vec![
                    $( self.$no.into_token(), )+
                ])
            }
        }
    }
}

// Tuples of arity 1 through 21 are supported.
impl_tuples!(1, A:0, );
impl_tuples!(2, A:0, B:1, );
impl_tuples!(3, A:0, B:1, C:2, );
impl_tuples!(4, A:0, B:1, C:2, D:3, );
impl_tuples!(5, A:0, B:1, C:2, D:3, E:4, );
impl_tuples!(6, A:0, B:1, C:2, D:3, E:4, F:5, );
impl_tuples!(7, A:0, B:1, C:2, D:3, E:4, F:5, G:6, );
impl_tuples!(8, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, );
impl_tuples!(9, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, );
impl_tuples!(10, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, );
impl_tuples!(11, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, );
impl_tuples!(12, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, );
impl_tuples!(13, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, );
impl_tuples!(14, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, );
impl_tuples!(15, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, );
impl_tuples!(16, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, );
impl_tuples!(17, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, Q:16,);
impl_tuples!(18, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, Q:16, R:17,);
impl_tuples!(19, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, Q:16, R:17, S:18,);
impl_tuples!(20, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, Q:16, R:17, S:18, T:19,);
impl_tuples!(21, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, Q:16, R:17, S:18, T:19, U:20,);
137
138impl Tokenizable for Token {
139    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
140        Ok(token)
141    }
142
143    fn into_token(self) -> Token {
144        self
145    }
146}
147
148impl Tokenizable for String {
149    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
150        match token {
151            Token::String(s) => Ok(s),
152            other => Err(InvalidOutputType(format!("Expected `String`, got {other:?}"))),
153        }
154    }
155
156    fn into_token(self) -> Token {
157        Token::String(self)
158    }
159}
160
161impl Tokenizable for Bytes {
162    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
163        match token {
164            Token::Bytes(s) => Ok(s.into()),
165            other => Err(InvalidOutputType(format!("Expected `Bytes`, got {other:?}"))),
166        }
167    }
168
169    fn into_token(self) -> Token {
170        Token::Bytes(self.to_vec())
171    }
172}
173
174impl Tokenizable for bytes::Bytes {
175    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
176        match token {
177            Token::Bytes(s) => Ok(s.into()),
178            other => Err(InvalidOutputType(format!("Expected `Bytes`, got {other:?}"))),
179        }
180    }
181
182    fn into_token(self) -> Token {
183        Token::Bytes(self.to_vec())
184    }
185}
186
187impl Tokenizable for H256 {
188    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
189        match token {
190            Token::FixedBytes(mut s) => {
191                if s.len() != 32 {
192                    return Err(InvalidOutputType(format!("Expected `H256`, got {s:?}")))
193                }
194                let mut data = [0; 32];
195                for (idx, val) in s.drain(..).enumerate() {
196                    data[idx] = val;
197                }
198                Ok(data.into())
199            }
200            other => Err(InvalidOutputType(format!("Expected `H256`, got {other:?}"))),
201        }
202    }
203
204    fn into_token(self) -> Token {
205        Token::FixedBytes(self.as_ref().to_vec())
206    }
207}
208
209impl Tokenizable for Address {
210    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
211        match token {
212            Token::Address(data) => Ok(data),
213            other => Err(InvalidOutputType(format!("Expected `Address`, got {other:?}"))),
214        }
215    }
216
217    fn into_token(self) -> Token {
218        Token::Address(self)
219    }
220}
221
222impl Tokenizable for bool {
223    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
224        match token {
225            Token::Bool(data) => Ok(data),
226            other => Err(InvalidOutputType(format!("Expected `bool`, got {other:?}"))),
227        }
228    }
229    fn into_token(self) -> Token {
230        Token::Bool(self)
231    }
232}
233
234macro_rules! eth_uint_tokenizable {
235    ($uint: ident, $name: expr) => {
236        impl Tokenizable for $uint {
237            fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
238                match token {
239                    Token::Int(data) | Token::Uint(data) => {
240                        Ok(::std::convert::TryInto::try_into(data).unwrap())
241                    }
242                    other => {
243                        Err(InvalidOutputType(format!("Expected `{}`, got {:?}", $name, other))
244                            .into())
245                    }
246                }
247            }
248
249            fn into_token(self) -> Token {
250                Token::Uint(self.into())
251            }
252        }
253    };
254}
255
256eth_uint_tokenizable!(U256, "U256");
257eth_uint_tokenizable!(U128, "U128");
258
/// Implements [`Tokenizable`] for Rust's primitive integer types, mapping
/// to `Token::Int` (signed) or `Token::Uint` (unsigned).
///
/// NOTE(review): `from_token` uses `data.low_u128() as _`, which silently
/// truncates token values that do not fit the target width (e.g. `Uint(300)`
/// detokenized as `u8`) — confirm this lossy behavior is intended.
macro_rules! int_tokenizable {
    ($int: ident, $token: ident) => {
        impl Tokenizable for $int {
            fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
                match token {
                    // Keep the low 128 bits, then cast to the target width.
                    Token::Int(data) | Token::Uint(data) => Ok(data.low_u128() as _),
                    other => Err(InvalidOutputType(format!(
                        "Expected `{}`, got {:?}",
                        stringify!($int),
                        other
                    ))),
                }
            }

            fn into_token(self) -> Token {
                // this should get optimized away by the compiler for unsigned integers
                #[allow(unused_comparisons)]
                let data = if self < 0 {
                    // NOTE: Rust does sign extension when converting from a
                    // signed integer to an unsigned integer, so:
                    // `-1u8 as u128 == u128::max_value()`
                    // The upper 128 bits are then set to all ones to complete
                    // the 256-bit two's-complement representation.
                    U256::from(self as u128) | U256([0, 0, u64::max_value(), u64::max_value()])
                } else {
                    self.into()
                };
                Token::$token(data)
            }
        }
    };
}

int_tokenizable!(i8, Int);
int_tokenizable!(i16, Int);
int_tokenizable!(i32, Int);
int_tokenizable!(i64, Int);
int_tokenizable!(i128, Int);
int_tokenizable!(u8, Uint);
int_tokenizable!(u16, Uint);
int_tokenizable!(u32, Uint);
int_tokenizable!(u64, Uint);
int_tokenizable!(u128, Uint);
300
301impl Tokenizable for Vec<u8> {
302    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
303        match token {
304            Token::Bytes(data) => Ok(data),
305            Token::Array(data) => data.into_iter().map(u8::from_token).collect(),
306            Token::FixedBytes(data) => Ok(data),
307            other => Err(InvalidOutputType(format!("Expected `bytes`, got {other:?}"))),
308        }
309    }
310
311    fn into_token(self) -> Token {
312        Token::Array(self.into_iter().map(Tokenizable::into_token).collect())
313    }
314}
315
316impl<T: TokenizableItem> Tokenizable for Vec<T> {
317    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
318        match token {
319            Token::FixedArray(tokens) | Token::Array(tokens) => {
320                tokens.into_iter().map(Tokenizable::from_token).collect()
321            }
322            other => Err(InvalidOutputType(format!("Expected `Array`, got {other:?}"))),
323        }
324    }
325
326    fn into_token(self) -> Token {
327        Token::Array(self.into_iter().map(Tokenizable::into_token).collect())
328    }
329}
330
331impl<const N: usize> Tokenizable for [u8; N] {
332    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
333        match token {
334            Token::FixedBytes(bytes) => {
335                if bytes.len() != N {
336                    return Err(InvalidOutputType(format!(
337                        "Expected `FixedBytes({})`, got FixedBytes({})",
338                        N,
339                        bytes.len()
340                    )))
341                }
342
343                let mut arr = [0; N];
344                arr.copy_from_slice(&bytes);
345                Ok(arr)
346            }
347            other => Err(InvalidOutputType(format!("Expected `FixedBytes({N})`, got {other:?}"))),
348        }
349    }
350
351    fn into_token(self) -> Token {
352        Token::FixedBytes(self.to_vec())
353    }
354}
355
356impl<T: TokenizableItem + Clone, const N: usize> Tokenizable for [T; N] {
357    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
358        match token {
359            Token::FixedArray(tokens) => {
360                if tokens.len() != N {
361                    return Err(InvalidOutputType(format!(
362                        "Expected `FixedArray({})`, got FixedArray({})",
363                        N,
364                        tokens.len()
365                    )))
366                }
367
368                let mut arr = ArrayVec::<T, N>::new();
369                let mut it = tokens.into_iter().map(T::from_token);
370                for _ in 0..N {
371                    arr.push(it.next().expect("Length validated in guard; qed")?);
372                }
373                // Can't use expect here because [T; N]: Debug is not satisfied.
374                match arr.into_inner() {
375                    Ok(arr) => Ok(arr),
376                    Err(_) => panic!("All elements inserted so the array is full; qed"),
377                }
378            }
379            other => Err(InvalidOutputType(format!("Expected `FixedArray({N})`, got {other:?}"))),
380        }
381    }
382
383    fn into_token(self) -> Token {
384        Token::FixedArray(ArrayVec::from(self).into_iter().map(T::into_token).collect())
385    }
386}
387
/// Marker trait for [`Tokenizable`] types that can be tokenized to and from a
/// [`Token::Array`] and [`Token::FixedArray`].
pub trait TokenizableItem: Tokenizable {}
391
/// Implements the [`TokenizableItem`] marker trait for each listed type.
macro_rules! tokenizable_item {
    ($($type: ty,)*) => {
        $(
            impl TokenizableItem for $type {}
        )*
    };
}
399
// NOTE: `u8` is deliberately absent from this list — marking it as
// `TokenizableItem` would make the blanket `impl<T: TokenizableItem>
// Tokenizable for Vec<T>` overlap with the dedicated `impl Tokenizable
// for Vec<u8>` above.
tokenizable_item! {
    Token, String, Address, H256, U256, I256, U128, bool, Vec<u8>,
    i8, i16, i32, i64, i128, u16, u32, u64, u128, Bytes, bytes::Bytes,
}
404
// Nested vectors and arrays of tokenizable items are themselves array items.
impl<T: TokenizableItem> TokenizableItem for Vec<T> {}

impl<const N: usize> TokenizableItem for [u8; N] {}

impl<T: TokenizableItem + Clone, const N: usize> TokenizableItem for [T; N] {}
410
/// Implements [`TokenizableItem`] for tuples of the given type parameters,
/// allowing tuples to appear as elements of ABI arrays.
macro_rules! impl_tokenizable_item_tuple {
    ($( $ty:ident ),+ $(,)?) => {
        impl<$( $ty ),+> TokenizableItem for ($( $ty, )+)
        where
            $(
                $ty: Tokenizable,
            )+
        {}
    }
}

// Mirror the arities covered by `impl_tuples!` above (1 through 21).
impl_tokenizable_item_tuple!(A,);
impl_tokenizable_item_tuple!(A, B,);
impl_tokenizable_item_tuple!(A, B, C,);
impl_tokenizable_item_tuple!(A, B, C, D,);
impl_tokenizable_item_tuple!(A, B, C, D, E,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I, J,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I, J, K,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I, J, K, L,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T,);
impl_tokenizable_item_tuple!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U,);
443
444/// Helper for flattening non-nested tokens into their inner types;
445///
446/// e.g. `(A, B, C)` would get tokenized to `Tuple([A, B, C])` when in fact we need `[A, B, C]`.
447#[inline]
448fn flatten_token(token: Token) -> Vec<Token> {
449    // flatten the tokens if required and there is no nesting
450    match token {
451        Token::Tuple(inner) => inner,
452        token => vec![token],
453    }
454}
455
#[cfg(test)]
mod tests {
    use super::*;

    // Compile-time probe: never called, only used to assert that `T:
    // Detokenize` holds for the annotated type.
    fn assert_detokenize<T: Detokenize>() -> T {
        unimplemented!()
    }

    #[test]
    #[ignore]
    fn should_be_able_to_compile() {
        let _tokens: Vec<Token> = assert_detokenize();
        let _uint: U256 = assert_detokenize();
        let _address: Address = assert_detokenize();
        let _string: String = assert_detokenize();
        let _bool: bool = assert_detokenize();
        let _bytes: Vec<u8> = assert_detokenize();

        let _pair: (U256, bool) = assert_detokenize();
        let _vec: Vec<U256> = assert_detokenize();
        let _array: [U256; 4] = assert_detokenize();
        let _bytes: Vec<[[u8; 1]; 64]> = assert_detokenize();

        let _mixed: (Vec<Vec<u8>>, [U256; 4], Vec<U256>, U256) = assert_detokenize();

        let _ints: (i16, i32, i64, i128) = assert_detokenize();
        let _uints: (u16, u32, u64, u128) = assert_detokenize();

        let _tuple: (Address, Vec<Vec<u8>>) = assert_detokenize();
        let _vec_of_tuple: Vec<(Address, String)> = assert_detokenize();
        #[allow(clippy::type_complexity)]
        let _vec_of_tuple_5: Vec<(Address, Vec<Vec<u8>>, String, U256, bool)> = assert_detokenize();
    }

    #[test]
    fn nested_tokenization() {
        // A nested tuple is flattened only at the top level.
        let x = (1u64, (2u64, 3u64));
        let tokens = x.into_tokens();
        assert_eq!(
            tokens,
            vec![
                Token::Uint(1.into()),
                Token::Tuple(vec![Token::Uint(2.into()), Token::Uint(3.into())])
            ]
        );

        let x = (1u64, 2u64);
        let tokens = x.into_tokens();
        assert_eq!(tokens, vec![Token::Uint(1.into()), Token::Uint(2.into()),]);
    }

    #[test]
    fn should_decode_array_of_fixed_bytes() {
        // bytes1[8]: a fixed array of eight 1-byte fixed-byte values.
        let tokens = vec![Token::FixedArray(vec![
            Token::FixedBytes(vec![1]),
            Token::FixedBytes(vec![2]),
            Token::FixedBytes(vec![3]),
            Token::FixedBytes(vec![4]),
            Token::FixedBytes(vec![5]),
            Token::FixedBytes(vec![6]),
            Token::FixedBytes(vec![7]),
            Token::FixedBytes(vec![8]),
        ])];
        let data: [[u8; 1]; 8] = Detokenize::from_tokens(tokens).unwrap();
        assert_eq!(data[0][0], 1);
        assert_eq!(data[1][0], 2);
        assert_eq!(data[2][0], 3);
        assert_eq!(data[7][0], 8);
    }

    #[test]
    fn should_sign_extend_negative_integers() {
        // Negative values must be two's-complement encoded over all 256 bits.
        assert_eq!((-1i8).into_token(), Token::Int(U256::MAX));
        assert_eq!((-2i16).into_token(), Token::Int(U256::MAX - 1));
        assert_eq!((-3i32).into_token(), Token::Int(U256::MAX - 2));
        assert_eq!((-4i64).into_token(), Token::Int(U256::MAX - 3));
        assert_eq!((-5i128).into_token(), Token::Int(U256::MAX - 4));
    }

    #[test]
    fn should_detokenize() {
        // handle a tuple of one element
        let tokens = vec![Token::FixedBytes(vec![1, 2, 3, 4]), Token::Bool(true)];
        let tokens = vec![Token::Tuple(tokens)];
        let data: ([u8; 4], bool) = Detokenize::from_tokens(tokens).unwrap();
        assert_eq!(data.0[0], 1);
        assert_eq!(data.0[1], 2);
        assert_eq!(data.0[2], 3);
        assert_eq!(data.0[3], 4);
        assert!(data.1);

        // handle a vector of more than one element
        let tokens = vec![Token::Bool(false), Token::Uint(U256::from(13u8))];
        let data: (bool, u8) = Detokenize::from_tokens(tokens).unwrap();
        assert!(!data.0);
        assert_eq!(data.1, 13u8);

        // handle more than two tuples
        let tokens1 = vec![Token::FixedBytes(vec![1, 2, 3, 4]), Token::Bool(true)];
        let tokens2 = vec![Token::Bool(false), Token::Uint(U256::from(13u8))];
        let tokens = vec![Token::Tuple(tokens1), Token::Tuple(tokens2)];
        let data: (([u8; 4], bool), (bool, u8)) = Detokenize::from_tokens(tokens).unwrap();
        assert_eq!((data.0).0[0], 1);
        assert_eq!((data.0).0[1], 2);
        assert_eq!((data.0).0[2], 3);
        assert_eq!((data.0).0[3], 4);
        assert!((data.0).1);
        assert!(!(data.1).0);
        assert_eq!((data.1).1, 13u8);

        // error if no tokens in the vector
        let tokens = vec![];
        let data: Result<U256, InvalidOutputType> = Detokenize::from_tokens(tokens);
        assert!(data.is_err());
    }
}
572}