fuels_types/traits/tokenizable.rs

1use fuel_types::{Address, AssetId, ContractId};
2
3use crate::{
4    core::{Bits256, RawSlice, SizedAsciiString, StringToken, Token},
5    errors::{error, Error, Result},
6    param_types::ParamType,
7    traits::Parameterize,
8    Bytes,
9};
10
/// Conversion between a Rust value and its ABI [`Token`] representation.
///
/// Implementors can be detokenized from (`from_token`) and tokenized into
/// (`into_token`) the `Token` intermediate form used by the ABI encoder/decoder.
pub trait Tokenizable {
    /// Converts a `Token` into expected type.
    ///
    /// Returns an error if the token variant does not match the target type.
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized;
    /// Converts a specified type back into token.
    fn into_token(self) -> Token;
}
19
20impl Tokenizable for Token {
21    fn from_token(token: Token) -> Result<Self> {
22        Ok(token)
23    }
24    fn into_token(self) -> Token {
25        self
26    }
27}
28
29impl Tokenizable for Bits256 {
30    fn from_token(token: Token) -> Result<Self>
31    where
32        Self: Sized,
33    {
34        match token {
35            Token::B256(data) => Ok(Bits256(data)),
36            _ => Err(error!(
37                InvalidData,
38                "Bits256 cannot be constructed from token {token}"
39            )),
40        }
41    }
42
43    fn into_token(self) -> Token {
44        Token::B256(self.0)
45    }
46}
47
48impl<T: Tokenizable> Tokenizable for Vec<T> {
49    fn from_token(token: Token) -> Result<Self>
50    where
51        Self: Sized,
52    {
53        if let Token::Vector(tokens) = token {
54            tokens.into_iter().map(Tokenizable::from_token).collect()
55        } else {
56            Err(error!(
57                InvalidData,
58                "Vec::from_token must only be given a Token::Vector. Got: {token}"
59            ))
60        }
61    }
62
63    fn into_token(self) -> Token {
64        let tokens = self.into_iter().map(Tokenizable::into_token).collect();
65        Token::Vector(tokens)
66    }
67}
68
69impl Tokenizable for bool {
70    fn from_token(token: Token) -> Result<Self> {
71        match token {
72            Token::Bool(data) => Ok(data),
73            other => Err(error!(
74                InstantiationError,
75                "Expected `bool`, got {:?}", other
76            )),
77        }
78    }
79    fn into_token(self) -> Token {
80        Token::Bool(self)
81    }
82}
83
84impl Tokenizable for () {
85    fn from_token(token: Token) -> Result<Self>
86    where
87        Self: Sized,
88    {
89        match token {
90            Token::Unit => Ok(()),
91            other => Err(error!(
92                InstantiationError,
93                "Expected `Unit`, got {:?}", other
94            )),
95        }
96    }
97
98    fn into_token(self) -> Token {
99        Token::Unit
100    }
101}
102
103impl Tokenizable for u8 {
104    fn from_token(token: Token) -> Result<Self> {
105        match token {
106            Token::U8(data) => Ok(data),
107            other => Err(error!(InstantiationError, "Expected `u8`, got {:?}", other)),
108        }
109    }
110    fn into_token(self) -> Token {
111        Token::U8(self)
112    }
113}
114
115impl Tokenizable for u16 {
116    fn from_token(token: Token) -> Result<Self> {
117        match token {
118            Token::U16(data) => Ok(data),
119            other => Err(error!(
120                InstantiationError,
121                "Expected `u16`, got {:?}", other
122            )),
123        }
124    }
125    fn into_token(self) -> Token {
126        Token::U16(self)
127    }
128}
129
130impl Tokenizable for u32 {
131    fn from_token(token: Token) -> Result<Self> {
132        match token {
133            Token::U32(data) => Ok(data),
134            other => Err(error!(
135                InstantiationError,
136                "Expected `u32`, got {:?}", other
137            )),
138        }
139    }
140    fn into_token(self) -> Token {
141        Token::U32(self)
142    }
143}
144
145impl Tokenizable for u64 {
146    fn from_token(token: Token) -> Result<Self> {
147        match token {
148            Token::U64(data) => Ok(data),
149            other => Err(error!(
150                InstantiationError,
151                "Expected `u64`, got {:?}", other
152            )),
153        }
154    }
155    fn into_token(self) -> Token {
156        Token::U64(self)
157    }
158}
159
160impl Tokenizable for u128 {
161    fn from_token(token: Token) -> Result<Self> {
162        match token {
163            Token::U128(data) => Ok(data),
164            other => Err(error!(
165                InstantiationError,
166                "Expected `u128`, got {:?}", other
167            )),
168        }
169    }
170    fn into_token(self) -> Token {
171        Token::U128(self)
172    }
173}
174
175impl Tokenizable for RawSlice {
176    fn from_token(token: Token) -> Result<Self>
177    where
178        Self: Sized,
179    {
180        match token {
181            Token::RawSlice(contents) => Ok(Self(contents)),
182            _ => Err(error!(InvalidData,
183                "RawSlice::from_token expected a token of the variant Token::RawSlice, got: {token}"
184            )),
185        }
186    }
187
188    fn into_token(self) -> Token {
189        Token::RawSlice(Vec::from(self))
190    }
191}
192
193impl Tokenizable for Bytes {
194    fn from_token(token: Token) -> Result<Self>
195    where
196        Self: Sized,
197    {
198        match token {
199            Token::Bytes(contents) => Ok(Self(contents)),
200            _ => Err(error!(
201                InvalidData,
202                "Bytes::from_token expected a token of the variant Token::Bytes, got: {token}"
203            )),
204        }
205    }
206
207    fn into_token(self) -> Token {
208        Token::Bytes(Vec::from(self))
209    }
210}
211
// Here we implement `Tokenizable` for a given tuple of a given length.
// This is done this way because we can't use `impl<T> Tokenizable for (T,)`.
// So we implement `Tokenizable` for each tuple length, covering
// a reasonable range of tuple lengths.
//
// `$ty` names the generic parameter for each element (A, B, C, ...) and
// `$no` is the matching tuple index (0, 1, 2, ...) used by `self.$no`.
macro_rules! impl_tokenizable_tuples {
    ($num: expr, $( $ty: ident : $no: tt, )+) => {
        impl<$($ty, )+> Tokenizable for ($($ty,)+) where
            $(
                $ty: Tokenizable,
            )+
        {
            fn from_token(token: Token) -> Result<Self> {
                match token {
                    Token::Tuple(tokens) => {
                        let mut it = tokens.into_iter();
                        // Yields the next token, or errors if the Token::Tuple
                        // carried fewer elements than this tuple arity needs.
                        let mut next_token = move || {
                            it.next().ok_or_else(|| {
                                error!(InstantiationError,"Ran out of tokens before tuple could be constructed")
                            })
                        };
                        Ok(($(
                          $ty::from_token(next_token()?)?,
                        )+))
                    },
                    other => Err(error!(InstantiationError,
                        "Expected `Tuple`, got {:?}",
                        other
                    )),
                }
            }

            fn into_token(self) -> Token {
                // Tokenize each element in positional order.
                Token::Tuple(vec![
                    $( self.$no.into_token(), )+
                ])
            }
        }

    }
}
252
// And where we actually implement the `Tokenizable` for tuples
// from size 1 to size 16 (mirroring the std library's trait coverage for tuples).
impl_tokenizable_tuples!(1, A:0, );
impl_tokenizable_tuples!(2, A:0, B:1, );
impl_tokenizable_tuples!(3, A:0, B:1, C:2, );
impl_tokenizable_tuples!(4, A:0, B:1, C:2, D:3, );
impl_tokenizable_tuples!(5, A:0, B:1, C:2, D:3, E:4, );
impl_tokenizable_tuples!(6, A:0, B:1, C:2, D:3, E:4, F:5, );
impl_tokenizable_tuples!(7, A:0, B:1, C:2, D:3, E:4, F:5, G:6, );
impl_tokenizable_tuples!(8, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, );
impl_tokenizable_tuples!(9, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, );
impl_tokenizable_tuples!(10, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, );
impl_tokenizable_tuples!(11, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, );
impl_tokenizable_tuples!(12, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, );
impl_tokenizable_tuples!(13, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, );
impl_tokenizable_tuples!(14, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, );
impl_tokenizable_tuples!(15, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, );
impl_tokenizable_tuples!(16, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, );
271
272impl Tokenizable for ContractId {
273    fn from_token(token: Token) -> Result<Self>
274    where
275        Self: Sized,
276    {
277        if let Token::Struct(tokens) = token {
278            if let [Token::B256(data)] = tokens.as_slice() {
279                Ok(ContractId::from(*data))
280            } else {
281                Err(error!(
282                    InstantiationError,
283                    "ContractId expected one `Token::B256`, got {tokens:?}"
284                ))
285            }
286        } else {
287            Err(error!(
288                InstantiationError,
289                "Address expected `Token::Struct` got {token:?}"
290            ))
291        }
292    }
293
294    fn into_token(self) -> Token {
295        let underlying_data: &[u8; 32] = &self;
296        Token::Struct(vec![Bits256(*underlying_data).into_token()])
297    }
298}
299
300impl Tokenizable for Address {
301    fn from_token(token: Token) -> Result<Self>
302    where
303        Self: Sized,
304    {
305        if let Token::Struct(tokens) = token {
306            if let [Token::B256(data)] = tokens.as_slice() {
307                Ok(Address::from(*data))
308            } else {
309                Err(error!(
310                    InstantiationError,
311                    "Address expected one `Token::B256`, got {tokens:?}"
312                ))
313            }
314        } else {
315            Err(error!(
316                InstantiationError,
317                "Address expected `Token::Struct` got {token:?}"
318            ))
319        }
320    }
321
322    fn into_token(self) -> Token {
323        let underlying_data: &[u8; 32] = &self;
324
325        Token::Struct(vec![Bits256(*underlying_data).into_token()])
326    }
327}
328
329impl Tokenizable for AssetId {
330    fn from_token(token: Token) -> Result<Self>
331    where
332        Self: Sized,
333    {
334        if let Token::Struct(tokens) = token {
335            if let [Token::B256(data)] = tokens.as_slice() {
336                Ok(AssetId::from(*data))
337            } else {
338                Err(error!(
339                    InstantiationError,
340                    "AssetId expected one `Token::B256`, got {tokens:?}"
341                ))
342            }
343        } else {
344            Err(error!(
345                InstantiationError,
346                "AssetId expected `Token::Struct` got {token:?}"
347            ))
348        }
349    }
350
351    fn into_token(self) -> Token {
352        let underlying_data: &[u8; 32] = &self;
353        Token::Struct(vec![Bits256(*underlying_data).into_token()])
354    }
355}
356
357impl<T> Tokenizable for Option<T>
358where
359    T: Tokenizable + Parameterize,
360{
361    fn from_token(token: Token) -> Result<Self> {
362        if let Token::Enum(enum_selector) = token {
363            match *enum_selector {
364                (0u8, _, _) => Ok(None),
365                (1u8, token, _) => Ok(Option::<T>::Some(T::from_token(token)?)),
366                (_, _, _) => Err(error!(
367                    InstantiationError,
368                    "Could not construct Option from enum_selector. Received: {:?}", enum_selector
369                )),
370            }
371        } else {
372            Err(error!(
373                InstantiationError,
374                "Could not construct Option from token. Received: {token:?}"
375            ))
376        }
377    }
378    fn into_token(self) -> Token {
379        let (dis, tok) = match self {
380            None => (0u8, Token::Unit),
381            Some(value) => (1u8, value.into_token()),
382        };
383        if let ParamType::Enum { variants, .. } = Self::param_type() {
384            let selector = (dis, tok, variants);
385            Token::Enum(Box::new(selector))
386        } else {
387            panic!("should never happen as Option::param_type() returns valid Enum variants");
388        }
389    }
390}
391
392impl<T, E> Tokenizable for std::result::Result<T, E>
393where
394    T: Tokenizable + Parameterize,
395    E: Tokenizable + Parameterize,
396{
397    fn from_token(token: Token) -> Result<Self> {
398        if let Token::Enum(enum_selector) = token {
399            match *enum_selector {
400                (0u8, token, _) => Ok(std::result::Result::<T, E>::Ok(T::from_token(token)?)),
401                (1u8, token, _) => Ok(std::result::Result::<T, E>::Err(E::from_token(token)?)),
402                (_, _, _) => Err(error!(
403                    InstantiationError,
404                    "Could not construct Result from enum_selector. Received: {:?}", enum_selector
405                )),
406            }
407        } else {
408            Err(error!(
409                InstantiationError,
410                "Could not construct Result from token. Received: {token:?}"
411            ))
412        }
413    }
414    fn into_token(self) -> Token {
415        let (dis, tok) = match self {
416            Ok(value) => (0u8, value.into_token()),
417            Err(value) => (1u8, value.into_token()),
418        };
419        if let ParamType::Enum { variants, .. } = Self::param_type() {
420            let selector = (dis, tok, variants);
421            Token::Enum(Box::new(selector))
422        } else {
423            panic!("should never happen as Result::param_type() returns valid Enum variants");
424        }
425    }
426}
427
428impl<const SIZE: usize, T: Tokenizable> Tokenizable for [T; SIZE] {
429    fn from_token(token: Token) -> Result<Self>
430    where
431        Self: Sized,
432    {
433        let gen_error = |reason| {
434            error!(
435                InvalidData,
436                "While constructing an array of size {SIZE}: {reason}"
437            )
438        };
439
440        match token {
441            Token::Array(elements) => {
442                let len = elements.len();
443                if len != SIZE {
444                    return Err(gen_error(format!(
445                        "Was given a Token::Array with wrong number of elements: {len}"
446                    )));
447                }
448
449                let detokenized = elements
450                    .into_iter()
451                    .map(Tokenizable::from_token)
452                    .collect::<Result<Vec<T>>>()
453                    .map_err(|err| {
454                        gen_error(format!(", not all elements could be detokenized: {err}"))
455                    })?;
456
457                Ok(detokenized.try_into().unwrap_or_else(|_| {
458                    panic!("This should never fail since we're checking the length beforehand.")
459                }))
460            }
461            _ => Err(gen_error(format!("Expected a Token::Array, got {token}"))),
462        }
463    }
464
465    fn into_token(self) -> Token {
466        Token::Array(self.map(Tokenizable::into_token).to_vec())
467    }
468}
469
470impl<const LEN: usize> Tokenizable for SizedAsciiString<LEN> {
471    fn from_token(token: Token) -> Result<Self>
472    where
473        Self: Sized,
474    {
475        match token {
476            Token::String(contents) => {
477                let expected_len = contents.get_encodable_str()?.len() ;
478                if expected_len!= LEN {
479                    return Err(error!(InvalidData,"SizedAsciiString<{LEN}>::from_token got a Token::String whose expected length({}) is != {LEN}", expected_len))
480                }
481                Self::new(contents.try_into()?)
482            },
483            _ => {
484                Err(error!(InvalidData,"SizedAsciiString<{LEN}>::from_token expected a token of the variant Token::String, got: {token}"))
485            }
486        }
487    }
488
489    fn into_token(self) -> Token {
490        Token::String(StringToken::new(self.into(), LEN))
491    }
492}
493
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_from_token_b256() -> Result<()> {
        let data = [1u8; 32];
        let token = Token::B256(data);

        let bits256 = Bits256::from_token(token)?;

        assert_eq!(bits256.0, data);

        Ok(())
    }

    #[test]
    fn test_into_token_b256() {
        let bytes = [1u8; 32];
        let bits256 = Bits256(bytes);

        let token = bits256.into_token();

        assert_eq!(token, Token::B256(bytes));
    }

    #[test]
    fn test_from_token_raw_slice() -> Result<()> {
        let data = vec![42; 11];
        let token = Token::RawSlice(data.clone());

        let slice = RawSlice::from_token(token)?;

        assert_eq!(slice, data);

        Ok(())
    }

    #[test]
    fn test_into_token_raw_slice() {
        let data = vec![13; 32];
        // Fix: the original called `.into_token()` on a `Token::RawSlice`,
        // which exercised `Token`'s identity impl and asserted a tautology.
        // Tokenize an actual `RawSlice` so `RawSlice::into_token` is tested.
        let raw_slice = RawSlice(data.clone());

        let token = raw_slice.into_token();

        assert_eq!(token, Token::RawSlice(data));
    }

    #[test]
    fn sized_ascii_string_is_tokenized_correctly() -> Result<()> {
        let sut = SizedAsciiString::<3>::new("abc".to_string())?;

        let token = sut.into_token();

        match token {
            Token::String(string_token) => {
                let contents = string_token.get_encodable_str()?;
                assert_eq!(contents, "abc");
            }
            _ => {
                panic!("Not tokenized correctly! Should have gotten a Token::String")
            }
        }

        Ok(())
    }

    #[test]
    fn sized_ascii_string_is_detokenized_correctly() -> Result<()> {
        let token = Token::String(StringToken::new("abc".to_string(), 3));

        let sized_ascii_string =
            SizedAsciiString::<3>::from_token(token).expect("Should have succeeded");

        assert_eq!(sized_ascii_string, "abc");

        Ok(())
    }
}