fuels_core/traits/tokenizable.rs

use fuel_types::{Address, AssetId, ContractId};

use crate::{
    traits::Parameterize,
    types::{
        errors::{error, Result},
        param_types::ParamType,
        AsciiString, Bits256, Bytes, RawSlice, SizedAsciiString, StaticStringToken, Token,
    },
};

pub trait Tokenizable {
    /// Converts a `Token` into the expected type.
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized;
    /// Converts the type back into a `Token`.
    fn into_token(self) -> Token;
}
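
// A minimal usage sketch (illustrative only, not part of the SDK surface):
// every implementation below must satisfy the round trip
// `value.into_token()` followed by `T::from_token(token)`, e.g.
//
//     let token = 42u64.into_token();      // Token::U64(42)
//     let value = u64::from_token(token)?; // Ok(42u64)
//
// `from_token` returns an `Other` error whenever the token variant does not
// match the implementing type.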

impl Tokenizable for Token {
    fn from_token(token: Token) -> Result<Self> {
        Ok(token)
    }
    fn into_token(self) -> Token {
        self
    }
}

impl Tokenizable for Bits256 {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::B256(data) => Ok(Bits256(data)),
            _ => Err(error!(
                Other,
                "`Bits256` cannot be constructed from token {token}"
            )),
        }
    }

    fn into_token(self) -> Token {
        Token::B256(self.0)
    }
}

impl<T: Tokenizable> Tokenizable for Vec<T> {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        if let Token::Vector(tokens) = token {
            tokens.into_iter().map(Tokenizable::from_token).collect()
        } else {
            Err(error!(
                Other,
                "`Vec::from_token` must only be given a `Token::Vector`. Got: `{token}`"
            ))
        }
    }

    fn into_token(self) -> Token {
        let tokens = self.into_iter().map(Tokenizable::into_token).collect();
        Token::Vector(tokens)
    }
}
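
// Because the element type only needs `Tokenizable`, this impl composes: a
// hypothetical `vec![vec![1u8, 2], vec![3]]` tokenizes to
// `Token::Vector([Token::Vector([U8(1), U8(2)]), Token::Vector([U8(3)])])`.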

impl Tokenizable for bool {
    fn from_token(token: Token) -> Result<Self> {
        match token {
            Token::Bool(data) => Ok(data),
            other => Err(error!(Other, "expected `bool`, got `{:?}`", other)),
        }
    }
    fn into_token(self) -> Token {
        Token::Bool(self)
    }
}

impl Tokenizable for () {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::Unit => Ok(()),
            other => Err(error!(Other, "expected `Unit`, got `{:?}`", other)),
        }
    }

    fn into_token(self) -> Token {
        Token::Unit
    }
}

impl Tokenizable for u8 {
    fn from_token(token: Token) -> Result<Self> {
        match token {
            Token::U8(data) => Ok(data),
            other => Err(error!(Other, "expected `u8`, got `{:?}`", other)),
        }
    }
    fn into_token(self) -> Token {
        Token::U8(self)
    }
}

impl Tokenizable for u16 {
    fn from_token(token: Token) -> Result<Self> {
        match token {
            Token::U16(data) => Ok(data),
            other => Err(error!(Other, "expected `u16`, got `{:?}`", other)),
        }
    }
    fn into_token(self) -> Token {
        Token::U16(self)
    }
}

impl Tokenizable for u32 {
    fn from_token(token: Token) -> Result<Self> {
        match token {
            Token::U32(data) => Ok(data),
            other => Err(error!(Other, "expected `u32`, got `{:?}`", other)),
        }
    }
    fn into_token(self) -> Token {
        Token::U32(self)
    }
}

impl Tokenizable for u64 {
    fn from_token(token: Token) -> Result<Self> {
        match token {
            Token::U64(data) => Ok(data),
            other => Err(error!(Other, "expected `u64`, got `{:?}`", other)),
        }
    }
    fn into_token(self) -> Token {
        Token::U64(self)
    }
}

impl Tokenizable for u128 {
    fn from_token(token: Token) -> Result<Self> {
        match token {
            Token::U128(data) => Ok(data),
            other => Err(error!(Other, "expected `u128`, got `{:?}`", other)),
        }
    }
    fn into_token(self) -> Token {
        Token::U128(self)
    }
}

impl Tokenizable for RawSlice {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::RawSlice(contents) => Ok(Self(contents)),
            _ => Err(error!(
                Other,
                "`RawSlice::from_token` expected a token of the variant `Token::RawSlice`, got: `{token}`"
            )),
        }
    }

    fn into_token(self) -> Token {
        Token::RawSlice(Vec::from(self))
    }
}

impl Tokenizable for Bytes {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::Bytes(contents) => Ok(Self(contents)),
            _ => Err(error!(
                Other,
                "`Bytes::from_token` expected a token of the variant `Token::Bytes`, got: `{token}`"
            )),
        }
    }

    fn into_token(self) -> Token {
        Token::Bytes(Vec::from(self))
    }
}

impl Tokenizable for String {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::String(string) => Ok(string),
            _ => Err(error!(
                Other,
                "`String::from_token` expected a token of the variant `Token::String`, got: `{token}`"
            )),
        }
    }

    fn into_token(self) -> Token {
        Token::String(self)
    }
}
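
// Note: `RawSlice` and `Bytes` both carry a `Vec<u8>` on the Rust side but
// tokenize to distinct variants, mirroring Sway's separate `raw_slice` and
// `Bytes` types.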

// Here we implement `Tokenizable` for a tuple of each length up to 16.
// Rust has no variadic generics, so a single impl cannot cover every tuple
// arity; instead this macro stamps out one impl per length.
macro_rules! impl_tokenizable_tuples {
    ($num: expr, $( $ty: ident : $no: tt, )+) => {
        impl<$($ty, )+> Tokenizable for ($($ty,)+) where
            $(
                $ty: Tokenizable,
            )+
        {
            fn from_token(token: Token) -> Result<Self> {
                match token {
                    Token::Tuple(tokens) => {
                        let mut it = tokens.into_iter();
                        let mut next_token = move || {
                            it.next().ok_or_else(|| {
                                error!(Other, "ran out of tokens before tuple could be constructed")
                            })
                        };
                        Ok(($(
                          $ty::from_token(next_token()?)?,
                        )+))
                    },
                    other => Err(error!(Other,
                        "expected `Tuple`, got `{:?}`",
                        other
                    )),
                }
            }

            fn into_token(self) -> Token {
                Token::Tuple(vec![
                    $( self.$no.into_token(), )+
                ])
            }
        }
    }
}

// And here we actually implement `Tokenizable` for tuples
// of size 1 through 16.
impl_tokenizable_tuples!(1, A:0, );
impl_tokenizable_tuples!(2, A:0, B:1, );
impl_tokenizable_tuples!(3, A:0, B:1, C:2, );
impl_tokenizable_tuples!(4, A:0, B:1, C:2, D:3, );
impl_tokenizable_tuples!(5, A:0, B:1, C:2, D:3, E:4, );
impl_tokenizable_tuples!(6, A:0, B:1, C:2, D:3, E:4, F:5, );
impl_tokenizable_tuples!(7, A:0, B:1, C:2, D:3, E:4, F:5, G:6, );
impl_tokenizable_tuples!(8, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, );
impl_tokenizable_tuples!(9, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, );
impl_tokenizable_tuples!(10, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, );
impl_tokenizable_tuples!(11, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, );
impl_tokenizable_tuples!(12, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, );
impl_tokenizable_tuples!(13, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, );
impl_tokenizable_tuples!(14, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, );
impl_tokenizable_tuples!(15, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, );
impl_tokenizable_tuples!(16, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, );
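
// For instance, `(1u8, true)` tokenizes to
// `Token::Tuple(vec![Token::U8(1), Token::Bool(true)])`, and detokenizing a
// `Token::Tuple` with too few elements fails with the "ran out of tokens"
// error above.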

impl Tokenizable for ContractId {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        if let Token::Struct(tokens) = token {
            if let [Token::B256(data)] = tokens.as_slice() {
                Ok(ContractId::from(*data))
            } else {
                Err(error!(
                    Other,
                    "`ContractId` expected one `Token::B256`, got `{tokens:?}`"
                ))
            }
        } else {
            Err(error!(
                Other,
                "`ContractId` expected `Token::Struct`, got `{token:?}`"
            ))
        }
    }

    fn into_token(self) -> Token {
        let underlying_data: &[u8; 32] = &self;
        Token::Struct(vec![Bits256(*underlying_data).into_token()])
    }
}

impl Tokenizable for Address {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        if let Token::Struct(tokens) = token {
            if let [Token::B256(data)] = tokens.as_slice() {
                Ok(Address::from(*data))
            } else {
                Err(error!(
                    Other,
                    "`Address` expected one `Token::B256`, got `{tokens:?}`"
                ))
            }
        } else {
            Err(error!(
                Other,
                "`Address` expected `Token::Struct`, got `{token:?}`"
            ))
        }
    }

    fn into_token(self) -> Token {
        let underlying_data: &[u8; 32] = &self;
        Token::Struct(vec![Bits256(*underlying_data).into_token()])
    }
}

impl Tokenizable for AssetId {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        if let Token::Struct(tokens) = token {
            if let [Token::B256(data)] = tokens.as_slice() {
                Ok(AssetId::from(*data))
            } else {
                Err(error!(
                    Other,
                    "`AssetId` expected one `Token::B256`, got `{tokens:?}`"
                ))
            }
        } else {
            Err(error!(
                Other,
                "`AssetId` expected `Token::Struct`, got `{token:?}`"
            ))
        }
    }

    fn into_token(self) -> Token {
        let underlying_data: &[u8; 32] = &self;
        Token::Struct(vec![Bits256(*underlying_data).into_token()])
    }
}
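
// `ContractId`, `Address`, and `AssetId` are each represented in Sway as a
// struct wrapping a single `b256`, which is why all three tokenize to a
// one-element `Token::Struct`; e.g. `ContractId::from([0u8; 32]).into_token()`
// yields `Token::Struct(vec![Token::B256([0u8; 32])])`.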

impl<T> Tokenizable for Option<T>
where
    T: Tokenizable + Parameterize,
{
    fn from_token(token: Token) -> Result<Self> {
        if let Token::Enum(enum_selector) = token {
            match *enum_selector {
                (0, _, _) => Ok(None),
                (1, token, _) => Ok(Option::<T>::Some(T::from_token(token)?)),
                (_, _, _) => Err(error!(
                    Other,
                    "could not construct `Option` from `enum_selector`. Received: `{:?}`",
                    enum_selector
                )),
            }
        } else {
            Err(error!(
                Other,
                "could not construct `Option` from token. Received: `{token:?}`"
            ))
        }
    }
    fn into_token(self) -> Token {
        let (dis, tok) = match self {
            None => (0, Token::Unit),
            Some(value) => (1, value.into_token()),
        };
        if let ParamType::Enum { enum_variants, .. } = Self::param_type() {
            let selector = (dis, tok, enum_variants);
            Token::Enum(Box::new(selector))
        } else {
            panic!("should never happen as `Option::param_type()` returns valid Enum variants");
        }
    }
}
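
// An enum selector is `(discriminant, inner_token, enum_variants)`: for
// `Option`, discriminant 0 is `None` (unit payload) and 1 is `Some`, so a
// hypothetical `Some(7u32).into_token()` produces
// `Token::Enum(Box::new((1, Token::U32(7), variants)))`.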

impl<T, E> Tokenizable for std::result::Result<T, E>
where
    T: Tokenizable + Parameterize,
    E: Tokenizable + Parameterize,
{
    fn from_token(token: Token) -> Result<Self> {
        if let Token::Enum(enum_selector) = token {
            match *enum_selector {
                (0, token, _) => Ok(std::result::Result::<T, E>::Ok(T::from_token(token)?)),
                (1, token, _) => Ok(std::result::Result::<T, E>::Err(E::from_token(token)?)),
                (_, _, _) => Err(error!(
                    Other,
                    "could not construct `Result` from `enum_selector`. Received: `{:?}`",
                    enum_selector
                )),
            }
        } else {
            Err(error!(
                Other,
                "could not construct `Result` from token. Received: `{token:?}`"
            ))
        }
    }
    fn into_token(self) -> Token {
        let (dis, tok) = match self {
            Ok(value) => (0, value.into_token()),
            Err(value) => (1, value.into_token()),
        };
        if let ParamType::Enum { enum_variants, .. } = Self::param_type() {
            let selector = (dis, tok, enum_variants);
            Token::Enum(Box::new(selector))
        } else {
            panic!("should never happen as `Result::param_type()` returns valid Enum variants");
        }
    }
}

impl<const SIZE: usize, T: Tokenizable> Tokenizable for [T; SIZE] {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        let gen_error = |reason| error!(Other, "constructing an array of size {SIZE}: {reason}");

        match token {
            Token::Array(elements) => {
                let len = elements.len();
                if len != SIZE {
                    return Err(gen_error(format!(
                        "`Token::Array` has wrong number of elements: {len}"
                    )));
                }

                let detokenized = elements
                    .into_iter()
                    .map(Tokenizable::from_token)
                    .collect::<Result<Vec<T>>>()
                    .map_err(|err| {
                        gen_error(format!("not all elements could be detokenized: {err}"))
                    })?;

                Ok(detokenized.try_into().unwrap_or_else(|_| {
                    panic!("this should never fail since we're checking the length beforehand")
                }))
            }
            _ => Err(gen_error(format!("expected a `Token::Array`, got {token}"))),
        }
    }

    fn into_token(self) -> Token {
        Token::Array(self.map(Tokenizable::into_token).to_vec())
    }
}
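
// E.g. `[0u8; 2]` round-trips through `Token::Array(vec![U8(0), U8(0)])`; a
// `Token::Array` whose length differs from `SIZE` is rejected before any
// element is detokenized.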

impl<const LEN: usize> Tokenizable for SizedAsciiString<LEN> {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::StringArray(contents) => {
                let actual_len = contents.get_encodable_str()?.len();
                if actual_len != LEN {
                    return Err(error!(
                        Other,
                        "`SizedAsciiString<{LEN}>::from_token` got a `Token::StringArray` of length {actual_len}, expected {LEN}"
                    ));
                }
                Self::new(contents.try_into()?)
            }
            _ => Err(error!(
                Other,
                "`SizedAsciiString<{LEN}>::from_token` expected a token of the variant `Token::StringArray`, got: `{token}`"
            )),
        }
    }

    fn into_token(self) -> Token {
        Token::StringArray(StaticStringToken::new(self.into(), Some(LEN)))
    }
}

impl Tokenizable for AsciiString {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::StringSlice(contents) => Self::new(contents.try_into()?),
            _ => Err(error!(
                Other,
                "`AsciiString::from_token` expected a token of the variant `Token::StringSlice`, got: `{token}`"
            )),
        }
    }

    fn into_token(self) -> Token {
        Token::StringSlice(StaticStringToken::new(self.into(), None))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_from_token_b256() -> Result<()> {
        let data = [1u8; 32];
        let token = Token::B256(data);

        let bits256 = Bits256::from_token(token)?;

        assert_eq!(bits256.0, data);

        Ok(())
    }

    #[test]
    fn test_into_token_b256() {
        let bytes = [1u8; 32];
        let bits256 = Bits256(bytes);

        let token = bits256.into_token();

        assert_eq!(token, Token::B256(bytes));
    }

    #[test]
    fn test_from_token_raw_slice() -> Result<()> {
        let data = vec![42; 11];
        let token = Token::RawSlice(data.clone());

        let slice = RawSlice::from_token(token)?;

        assert_eq!(slice, data);

        Ok(())
    }

    #[test]
    fn test_into_token_raw_slice() {
        let data = vec![13; 32];
        // Construct a `RawSlice` (not a `Token`) so that this exercises
        // `RawSlice::into_token` rather than the identity impl on `Token`.
        let raw_slice = RawSlice(data.clone());

        let token = raw_slice.into_token();

        assert_eq!(token, Token::RawSlice(data));
    }

    #[test]
    fn sized_ascii_string_is_tokenized_correctly() -> Result<()> {
        let sut = SizedAsciiString::<3>::new("abc".to_string())?;

        let token = sut.into_token();

        match token {
            Token::StringArray(string_token) => {
                let contents = string_token.get_encodable_str()?;
                assert_eq!(contents, "abc");
            }
            _ => {
                panic!("not tokenized correctly! Should have gotten a `Token::StringArray`")
            }
        }

        Ok(())
    }

    #[test]
    fn sized_ascii_string_is_detokenized_correctly() -> Result<()> {
        let token = Token::StringArray(StaticStringToken::new("abc".to_string(), Some(3)));

        let sized_ascii_string =
            SizedAsciiString::<3>::from_token(token).expect("should have succeeded");

        assert_eq!(sized_ascii_string, "abc");

        Ok(())
    }

    #[test]
    fn test_into_token_std_string() -> Result<()> {
        let expected = String::from("hello");

        // Exercise `String::into_token` directly, then round-trip back.
        let token = expected.clone().into_token();
        assert_eq!(token, Token::String(expected.clone()));

        let detokenized = String::from_token(token)?;
        assert_eq!(detokenized, expected);

        Ok(())
    }
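
    // Illustrative round-trip tests added for clarity; they exercise the
    // `Vec`, tuple, and `[T; N]` impls above (test names are ours, not the
    // SDK's).
    #[test]
    fn vec_round_trip() -> Result<()> {
        let original = vec![1u8, 2, 3];

        let token = original.clone().into_token();
        assert_eq!(
            token,
            Token::Vector(vec![Token::U8(1), Token::U8(2), Token::U8(3)])
        );

        let detokenized = Vec::<u8>::from_token(token)?;
        assert_eq!(detokenized, original);

        Ok(())
    }

    #[test]
    fn tuple_round_trip() -> Result<()> {
        let original = (42u64, true);

        let token = original.into_token();
        assert_eq!(token, Token::Tuple(vec![Token::U64(42), Token::Bool(true)]));

        let detokenized = <(u64, bool)>::from_token(token)?;
        assert_eq!(detokenized, original);

        Ok(())
    }

    #[test]
    fn array_with_wrong_len_is_rejected() {
        // Two elements cannot detokenize into `[u8; 3]`.
        let token = Token::Array(vec![Token::U8(1), Token::U8(2)]);

        let result = <[u8; 3]>::from_token(token);
        assert!(result.is_err());
    }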
}