fuel_tx/transaction/
metadata.rs1use alloc::vec::Vec;
2use fuel_types::{
3 canonical::Serialize,
4 Bytes32,
5 ChainId,
6};
7
8use crate::{
9 field,
10 UniqueIdentifier,
11 ValidityError,
12};
13
/// A transaction whose derived metadata can be computed once and cached.
///
/// NOTE(review): the trait itself carries no bodies; the semantics below are
/// taken from the `Transaction` implementation in this file — confirm against
/// the per-variant implementations.
pub trait Cacheable {
    /// Returns `true` once the cached metadata has been computed for `self`.
    fn is_computed(&self) -> bool;

    /// Computes (and presumably stores) the metadata for `self`.
    ///
    /// `chain_id` participates in the computation (it feeds the transaction
    /// id derivation — see `CommonMetadata::compute` below). Fails with a
    /// `ValidityError` when the metadata cannot be computed.
    fn precompute(&mut self, chain_id: &ChainId) -> Result<(), ValidityError>;
}
24
25impl Cacheable for super::Transaction {
26 fn is_computed(&self) -> bool {
27 match self {
28 Self::Script(tx) => tx.is_computed(),
29 Self::Create(tx) => tx.is_computed(),
30 Self::Mint(tx) => tx.is_computed(),
31 Self::Upgrade(tx) => tx.is_computed(),
32 Self::Upload(tx) => tx.is_computed(),
33 Self::Blob(tx) => tx.is_computed(),
34 }
35 }
36
37 fn precompute(&mut self, chain_id: &ChainId) -> Result<(), ValidityError> {
38 match self {
39 Self::Script(tx) => tx.precompute(chain_id),
40 Self::Create(tx) => tx.precompute(chain_id),
41 Self::Mint(tx) => tx.precompute(chain_id),
42 Self::Upgrade(tx) => tx.precompute(chain_id),
43 Self::Upload(tx) => tx.precompute(chain_id),
44 Self::Blob(tx) => tx.precompute(chain_id),
45 }
46 }
47}
48
/// Metadata shared by all transaction variants: the transaction id plus the
/// byte offsets of each section of the canonically serialized transaction.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CommonMetadata {
    /// Unique transaction id (chain-id dependent; see `UniqueIdentifier::id`).
    pub id: Bytes32,
    /// Byte offset where the inputs section starts.
    pub inputs_offset: usize,
    /// Byte offset of each input, indexed by input position.
    pub inputs_offset_at: Vec<usize>,
    /// Per input: `Some(..)` when the input carries a predicate, else `None`.
    /// NOTE(review): the tuple presumably is (predicate offset, predicate
    /// length) — confirm against `field::Inputs::inputs_predicate_offset_at`.
    pub inputs_predicate_offset_at: Vec<Option<(usize, usize)>>,
    /// Byte offset where the outputs section starts.
    pub outputs_offset: usize,
    /// Byte offset of each output, indexed by output position.
    pub outputs_offset_at: Vec<usize>,
    /// Byte offset where the witnesses section starts.
    pub witnesses_offset: usize,
    /// Byte offset of each witness, indexed by witness position.
    pub witnesses_offset_at: Vec<usize>,
}
61
62impl CommonMetadata {
63 pub fn compute<Tx>(tx: &Tx, chain_id: &ChainId) -> Result<Self, ValidityError>
66 where
67 Tx: UniqueIdentifier,
68 Tx: field::Inputs,
69 Tx: field::Outputs,
70 Tx: field::Witnesses,
71 {
72 use itertools::Itertools;
73
74 let id = tx.id(chain_id);
75
76 let inputs_predicate_offset_at = tx
77 .inputs()
78 .iter()
79 .enumerate()
80 .map(|(i, _)| tx.inputs_predicate_offset_at(i))
81 .collect_vec();
82
83 let mut offset = tx.inputs_offset();
84 let mut inputs_offset_at = Vec::with_capacity(tx.inputs().len());
85 for (index, input) in tx.inputs().iter().enumerate() {
86 let i = offset;
87 offset = offset
88 .checked_add(input.size())
89 .ok_or(ValidityError::SerializedInputTooLarge { index })?;
90 inputs_offset_at.push(i);
91 }
92
93 let mut offset = tx.outputs_offset();
94 let mut outputs_offset_at = Vec::with_capacity(tx.outputs().len());
95 for (index, output) in tx.outputs().iter().enumerate() {
96 let i = offset;
97 offset = offset
98 .checked_add(output.size())
99 .ok_or(ValidityError::SerializedOutputTooLarge { index })?;
100 outputs_offset_at.push(i);
101 }
102
103 let mut offset = tx.witnesses_offset();
104 let mut witnesses_offset_at = Vec::with_capacity(tx.witnesses().len());
105 for (index, witnesses) in tx.witnesses().iter().enumerate() {
106 let i = offset;
107 offset = offset
108 .checked_add(witnesses.size())
109 .ok_or(ValidityError::SerializedWitnessTooLarge { index })?;
110 witnesses_offset_at.push(i);
111 }
112
113 Ok(Self {
114 id,
115 inputs_offset: tx.inputs_offset(),
116 inputs_offset_at,
117 inputs_predicate_offset_at,
118 outputs_offset: tx.outputs_offset(),
119 outputs_offset_at,
120 witnesses_offset: tx.witnesses_offset(),
121 witnesses_offset_at,
122 })
123 }
124}