use crate::{
    contract::error::Error,
    types::{Address, Bytes, BytesArray, H256, U128, U256},
};
use arrayvec::ArrayVec;
use ethabi::Token;

/// Output type that can be deserialized from contract ABI tokens.
pub trait Detokenize {
    /// Creates a new instance from parsed ABI tokens.
    fn from_tokens(tokens: Vec<Token>) -> Result<Self, Error>
    where
        Self: Sized;
}

impl<T: Tokenizable> Detokenize for T {
    fn from_tokens(mut tokens: Vec<Token>) -> Result<Self, Error> {
        if tokens.len() != 1 {
            Err(Error::InvalidOutputType(format!(
                "Expected single element, got a list: {:?}",
                tokens
            )))
        } else {
            Self::from_token(tokens.drain(..).next().expect("At least one element in vector; qed"))
        }
    }
}

macro_rules! impl_output {
    ($num: expr, $( $ty: ident , )+) => {
        impl<$($ty, )+> Detokenize for ($($ty,)+) where
            $(
                $ty: Tokenizable,
            )+
        {
            fn from_tokens(mut tokens: Vec<Token>) -> Result<Self, Error> {
                if tokens.len() != $num {
                    return Err(Error::InvalidOutputType(format!(
                        "Expected {} elements, got a list of {}: {:?}",
                        $num,
                        tokens.len(),
                        tokens
                    )));
                }
                let mut it = tokens.drain(..);
                Ok(($(
                    $ty::from_token(it.next().expect("All elements are in vector; qed"))?,
                )+))
            }
        }
    }
}

impl_output!(1, A,);
impl_output!(2, A, B,);
impl_output!(3, A, B, C,);
impl_output!(4, A, B, C, D,);
impl_output!(5, A, B, C, D, E,);
impl_output!(6, A, B, C, D, E, F,);
impl_output!(7, A, B, C, D, E, F, G,);
impl_output!(8, A, B, C, D, E, F, G, H,);
impl_output!(9, A, B, C, D, E, F, G, H, I,);
impl_output!(10, A, B, C, D, E, F, G, H, I, J,);
impl_output!(11, A, B, C, D, E, F, G, H, I, J, K,);
impl_output!(12, A, B, C, D, E, F, G, H, I, J, K, L,);
impl_output!(13, A, B, C, D, E, F, G, H, I, J, K, L, M,);
impl_output!(14, A, B, C, D, E, F, G, H, I, J, K, L, M, N,);
impl_output!(15, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O,);
impl_output!(16, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P,);
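
// Illustrative usage (a sketch, not part of the original source): with the tuple
// impls above, a call returning several ABI values decodes in one step, e.g.
// `let (amount, ok): (U256, bool) = Detokenize::from_tokens(tokens)?;`
// where `tokens` is the decoded output of the call.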

/// Conversion of a value (or tuple of values) into a list of ABI tokens.
pub trait Tokenize {
    /// Converts `self` into a list of tokens.
    fn into_tokens(self) -> Vec<Token>;
}

impl<'a> Tokenize for &'a [Token] {
    fn into_tokens(self) -> Vec<Token> {
        self.to_vec()
    }
}

impl<T: Tokenizable> Tokenize for T {
    fn into_tokens(self) -> Vec<Token> {
        vec![self.into_token()]
    }
}

impl Tokenize for () {
    fn into_tokens(self) -> Vec<Token> {
        vec![]
    }
}

macro_rules! impl_tokens {
    ($( $ty: ident : $no: tt, )+) => {
        impl<$($ty, )+> Tokenize for ($($ty,)+) where
            $(
                $ty: Tokenizable,
            )+
        {
            fn into_tokens(self) -> Vec<Token> {
                vec![
                    $( self.$no.into_token(), )+
                ]
            }
        }
    }
}

impl_tokens!(A:0, );
impl_tokens!(A:0, B:1, );
impl_tokens!(A:0, B:1, C:2, );
impl_tokens!(A:0, B:1, C:2, D:3, );
impl_tokens!(A:0, B:1, C:2, D:3, E:4, );
impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, );
impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, );
impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, );
impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, );
impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, );
impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, );
impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, );
impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, );
impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, );
impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, );
impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, );
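
// Illustrative usage (a sketch, not part of the original source): the tuple impls
// above let heterogeneous call parameters be tokenized in one go, e.g.
// `let params: Vec<Token> = (recipient, U256::from(10)).into_tokens();`
// where `recipient` is a hypothetical `Address` value.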

/// A single value that can be converted to and from an ABI `Token`.
pub trait Tokenizable {
    /// Converts a `Token` into the expected type.
    fn from_token(token: Token) -> Result<Self, Error>
    where
        Self: Sized;
    /// Converts the value back into a `Token`.
    fn into_token(self) -> Token;
}

impl Tokenizable for Token {
    fn from_token(token: Token) -> Result<Self, Error> {
        Ok(token)
    }
    fn into_token(self) -> Token {
        self
    }
}

impl Tokenizable for String {
    fn from_token(token: Token) -> Result<Self, Error> {
        match token {
            Token::String(s) => Ok(s),
            other => Err(Error::InvalidOutputType(format!("Expected `String`, got {:?}", other))),
        }
    }

    fn into_token(self) -> Token {
        Token::String(self)
    }
}

impl Tokenizable for Bytes {
    fn from_token(token: Token) -> Result<Self, Error> {
        match token {
            Token::Bytes(s) => Ok(s.into()),
            other => Err(Error::InvalidOutputType(format!("Expected `Bytes`, got {:?}", other))),
        }
    }

    fn into_token(self) -> Token {
        Token::Bytes(self.0)
    }
}

impl Tokenizable for H256 {
    fn from_token(token: Token) -> Result<Self, Error> {
        match token {
            Token::FixedBytes(s) => {
                if s.len() != 32 {
                    return Err(Error::InvalidOutputType(format!("Expected `H256`, got {:?}", s)));
                }
                let mut data = [0u8; 32];
                data.copy_from_slice(&s);
                Ok(data.into())
            }
            other => Err(Error::InvalidOutputType(format!("Expected `H256`, got {:?}", other))),
        }
    }

    fn into_token(self) -> Token {
        Token::FixedBytes(self.as_ref().to_vec())
    }
}

impl Tokenizable for Address {
    fn from_token(token: Token) -> Result<Self, Error> {
        match token {
            Token::Address(data) => Ok(data),
            other => Err(Error::InvalidOutputType(format!("Expected `Address`, got {:?}", other))),
        }
    }

    fn into_token(self) -> Token {
        Token::Address(self)
    }
}

macro_rules! eth_uint_tokenizable {
    ($uint: ident, $name: expr) => {
        impl Tokenizable for $uint {
            fn from_token(token: Token) -> Result<Self, Error> {
                match token {
                    // Reject values that do not fit the target width (e.g. a `uint256`
                    // too large for `U128`).
                    Token::Int(data) | Token::Uint(data) => ::std::convert::TryInto::try_into(data)
                        .map_err(|_| {
                            Error::InvalidOutputType(format!("Expected `{}`, got out-of-range value", $name))
                        }),
                    other => Err(Error::InvalidOutputType(format!("Expected `{}`, got {:?}", $name, other))),
                }
            }

            fn into_token(self) -> Token {
                Token::Uint(self.into())
            }
        }
    };
}

eth_uint_tokenizable!(U256, "U256");
eth_uint_tokenizable!(U128, "U128");

macro_rules! int_tokenizable {
    ($int: ident, $token: ident) => {
        impl Tokenizable for $int {
            fn from_token(token: Token) -> Result<Self, Error> {
                match token {
                    Token::Int(data) | Token::Uint(data) => Ok(data.low_u128() as _),
                    other => Err(Error::InvalidOutputType(format!(
                        "Expected `{}`, got {:?}",
                        stringify!($int),
                        other
                    ))),
                }
            }

            fn into_token(self) -> Token {
                // The comparison is always false for the unsigned types, so the branch
                // is compiled away for them.
                #[allow(unused_comparisons)]
                let data = if self < 0 {
                    // Casting a negative signed integer to `u128` keeps its two's-complement
                    // bit pattern (sign-extending the smaller types), so ORing in the top two
                    // 64-bit words extends the sign to the full 256 bits.
                    U256::from(self as u128) | U256([0, 0, u64::max_value(), u64::max_value()])
                } else {
                    self.into()
                };
                Token::$token(data)
            }
        }
    };
}

int_tokenizable!(i8, Int);
int_tokenizable!(i16, Int);
int_tokenizable!(i32, Int);
int_tokenizable!(i64, Int);
int_tokenizable!(i128, Int);
int_tokenizable!(u8, Uint);
int_tokenizable!(u16, Uint);
int_tokenizable!(u32, Uint);
int_tokenizable!(u64, Uint);
int_tokenizable!(u128, Uint);

impl Tokenizable for bool {
    fn from_token(token: Token) -> Result<Self, Error> {
        match token {
            Token::Bool(data) => Ok(data),
            other => Err(Error::InvalidOutputType(format!("Expected `bool`, got {:?}", other))),
        }
    }
    fn into_token(self) -> Token {
        Token::Bool(self)
    }
}

/// Marker trait for `Tokenizable` types that can appear inside a
/// `Token::Array` or `Token::FixedArray`.
pub trait TokenizableItem: Tokenizable {}

macro_rules! tokenizable_item {
    ($($type: ty,)*) => {
        $(
            impl TokenizableItem for $type {}
        )*
    };
}

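// Note that `u8` does not appear in the list below: `Vec<u8>` has a dedicated
// `Tokenizable` impl (ABI `bytes`), which would overlap with the blanket
// `impl<T: TokenizableItem> Tokenizable for Vec<T>` if `u8` were a `TokenizableItem`.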
tokenizable_item! {
    Token, String, Address, H256, U256, U128, bool, BytesArray, Vec<u8>,
    i8, i16, i32, i64, i128, u16, u32, u64, u128,
}

impl Tokenizable for BytesArray {
    fn from_token(token: Token) -> Result<Self, Error> {
        match token {
            Token::FixedArray(tokens) | Token::Array(tokens) => {
                let bytes = tokens
                    .into_iter()
                    .map(Tokenizable::from_token)
                    .collect::<Result<Vec<u8>, Error>>()?;
                Ok(Self(bytes))
            }
            other => Err(Error::InvalidOutputType(format!("Expected `Array`, got {:?}", other))),
        }
    }

    fn into_token(self) -> Token {
        Token::Array(self.0.into_iter().map(Tokenizable::into_token).collect())
    }
}

impl Tokenizable for Vec<u8> {
    fn from_token(token: Token) -> Result<Self, Error> {
        match token {
            Token::Bytes(data) => Ok(data),
            Token::FixedBytes(data) => Ok(data),
            other => Err(Error::InvalidOutputType(format!("Expected `bytes`, got {:?}", other))),
        }
    }
    fn into_token(self) -> Token {
        Token::Bytes(self)
    }
}

impl<T: TokenizableItem> Tokenizable for Vec<T> {
    fn from_token(token: Token) -> Result<Self, Error> {
        match token {
            Token::FixedArray(tokens) | Token::Array(tokens) => {
                tokens.into_iter().map(Tokenizable::from_token).collect()
            }
            other => Err(Error::InvalidOutputType(format!("Expected `Array`, got {:?}", other))),
        }
    }

    fn into_token(self) -> Token {
        Token::Array(self.into_iter().map(Tokenizable::into_token).collect())
    }
}

impl<T: TokenizableItem> TokenizableItem for Vec<T> {}

macro_rules! impl_fixed_types {
    ($num: expr) => {
        impl Tokenizable for [u8; $num] {
            fn from_token(token: Token) -> Result<Self, Error> {
                match token {
                    Token::FixedBytes(bytes) => {
                        if bytes.len() != $num {
                            return Err(Error::InvalidOutputType(format!(
                                "Expected `FixedBytes({})`, got FixedBytes({})",
                                $num,
                                bytes.len()
                            )));
                        }

                        let mut arr = [0; $num];
                        arr.copy_from_slice(&bytes);
                        Ok(arr)
                    }
                    other => Err(Error::InvalidOutputType(format!(
                        "Expected `FixedBytes({})`, got {:?}",
                        $num, other
                    ))),
                }
            }

            fn into_token(self) -> Token {
                Token::FixedBytes(self.to_vec())
            }
        }

        impl TokenizableItem for [u8; $num] {}

        impl<T: TokenizableItem + Clone> Tokenizable for [T; $num] {
            fn from_token(token: Token) -> Result<Self, Error> {
                match token {
                    Token::FixedArray(tokens) => {
                        if tokens.len() != $num {
                            return Err(Error::InvalidOutputType(format!(
                                "Expected `FixedArray({})`, got FixedArray({})",
                                $num,
                                tokens.len()
                            )));
                        }

                        let mut arr = ArrayVec::<T, $num>::new();
                        let mut it = tokens.into_iter().map(T::from_token);
                        for _ in 0..$num {
                            arr.push(it.next().expect("Length validated in guard; qed")?);
                        }
                        // Exactly `$num` elements were pushed, so the `ArrayVec` is full
                        // and `into_inner` cannot fail.
                        match arr.into_inner() {
                            Ok(arr) => Ok(arr),
                            Err(_) => panic!("All elements inserted so the array is full; qed"),
                        }
                    }
                    other => Err(Error::InvalidOutputType(format!(
                        "Expected `FixedArray({})`, got {:?}",
                        $num, other
                    ))),
                }
            }

            fn into_token(self) -> Token {
                Token::FixedArray(ArrayVec::from(self).into_iter().map(T::into_token).collect())
            }
        }

        impl<T: TokenizableItem + Clone> TokenizableItem for [T; $num] {}
    };
}

impl_fixed_types!(1);
impl_fixed_types!(2);
impl_fixed_types!(3);
impl_fixed_types!(4);
impl_fixed_types!(5);
impl_fixed_types!(6);
impl_fixed_types!(7);
impl_fixed_types!(8);
impl_fixed_types!(9);
impl_fixed_types!(10);
impl_fixed_types!(11);
impl_fixed_types!(12);
impl_fixed_types!(13);
impl_fixed_types!(14);
impl_fixed_types!(15);
impl_fixed_types!(16);
impl_fixed_types!(32);
impl_fixed_types!(64);
impl_fixed_types!(128);
impl_fixed_types!(256);
impl_fixed_types!(512);
impl_fixed_types!(1024);

#[cfg(test)]
mod tests {
    use super::{Detokenize, Tokenizable};
    use crate::types::{Address, BytesArray, U256};
    use ethabi::{Token, Uint};
    use hex_literal::hex;

    /// Helper that only needs to type-check; the compile test below is `#[ignore]`d,
    /// so this is never actually called.
    fn output<R: Detokenize>() -> R {
        unimplemented!()
    }

    #[test]
    #[ignore]
    fn should_be_able_to_compile() {
        let _tokens: Vec<Token> = output();
        let _uint: U256 = output();
        let _address: Address = output();
        let _string: String = output();
        let _bool: bool = output();
        let _bytes: Vec<u8> = output();
        let _bytes_array: BytesArray = output();

        let _pair: (U256, bool) = output();
        let _vec: Vec<U256> = output();
        let _array: [U256; 4] = output();
        let _bytes: Vec<[[u8; 1]; 64]> = output();

        let _mixed: (Vec<Vec<u8>>, [U256; 4], Vec<U256>, U256) = output();

        let _ints: (i8, i16, i32, i64, i128) = output();
        let _uints: (u16, u32, u64, u128) = output();
    }
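
    // A round-trip sketch added for illustration (not part of the original suite):
    // tokens produced by `Tokenize::into_tokens` should decode back through the
    // tuple `Detokenize` impl.
    #[test]
    fn should_round_trip_tuple() {
        use super::Tokenize;

        let tokens = (U256::from(42), true, String::from("hello")).into_tokens();
        assert_eq!(tokens.len(), 3);
        let (uint, boolean, string): (U256, bool, String) = Detokenize::from_tokens(tokens).unwrap();
        assert_eq!(uint, U256::from(42));
        assert!(boolean);
        assert_eq!(string, "hello");
    }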

    #[test]
    fn should_decode_array_of_fixed_bytes() {
        let tokens = vec![Token::FixedArray(vec![
            Token::FixedBytes(hex!("01").into()),
            Token::FixedBytes(hex!("02").into()),
            Token::FixedBytes(hex!("03").into()),
            Token::FixedBytes(hex!("04").into()),
            Token::FixedBytes(hex!("05").into()),
            Token::FixedBytes(hex!("06").into()),
            Token::FixedBytes(hex!("07").into()),
            Token::FixedBytes(hex!("08").into()),
        ])];
        let data: [[u8; 1]; 8] = Detokenize::from_tokens(tokens).unwrap();
        assert_eq!(data[0][0], 1);
        assert_eq!(data[1][0], 2);
        assert_eq!(data[2][0], 3);
        assert_eq!(data[7][0], 8);
    }
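
    // An illustrative sketch (not part of the original suite): `[u8; N]` maps to
    // `Token::FixedBytes` and back; the 4-byte case stands in for the other sizes
    // covered by `impl_fixed_types!`.
    #[test]
    fn should_round_trip_fixed_bytes() {
        let bytes: [u8; 4] = [0xde, 0xad, 0xbe, 0xef];
        let token = bytes.into_token();
        assert_eq!(token, Token::FixedBytes(vec![0xde, 0xad, 0xbe, 0xef]));
        let decoded: [u8; 4] = Tokenizable::from_token(token).unwrap();
        assert_eq!(decoded, bytes);
    }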

    #[test]
    fn should_decode_array_of_bytes() {
        let token = Token::Array(vec![Token::Uint(Uint::from(0)), Token::Uint(Uint::from(1))]);
        let data: BytesArray = Tokenizable::from_token(token).unwrap();
        assert_eq!(data.0[0], 0);
        assert_eq!(data.0[1], 1);
    }
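
    // An illustrative sketch (not part of the original suite): a token of the wrong
    // kind is rejected with `Error::InvalidOutputType` rather than panicking.
    #[test]
    fn should_reject_wrong_token_type() {
        let result: Result<bool, _> = Tokenizable::from_token(Token::String("not a bool".into()));
        assert!(result.is_err());
    }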

    #[test]
    fn should_sign_extend_negative_integers() {
        assert_eq!((-1i8).into_token(), Token::Int(U256::MAX));
        assert_eq!((-2i16).into_token(), Token::Int(U256::MAX - 1));
        assert_eq!((-3i32).into_token(), Token::Int(U256::MAX - 2));
        assert_eq!((-4i64).into_token(), Token::Int(U256::MAX - 3));
        assert_eq!((-5i128).into_token(), Token::Int(U256::MAX - 4));
    }
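
    // A complementary sketch (not part of the original suite): non-negative values
    // take the plain `self.into()` branch of `int_tokenizable!`, so no sign
    // extension is applied.
    #[test]
    fn should_not_sign_extend_non_negative_integers() {
        assert_eq!(1u8.into_token(), Token::Uint(U256::from(1)));
        assert_eq!(127i8.into_token(), Token::Int(U256::from(127)));
        assert_eq!(u64::MAX.into_token(), Token::Uint(U256::from(u64::MAX)));
    }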
}