// hpl_toolkit/compression/event.rs

use crate::schema::{Schema, SchemaValue};
use anchor_lang::{prelude::*, solana_program::keccak};
use spl_account_compression::{wrap_application_data_v1, Noop};

use super::calculate_canopy_depth_header_v1;

/// Payload attached to a [`CompressedDataEvent::Leaf`], describing how the
/// leaf's underlying data changed.
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(AnchorDeserialize, AnchorSerialize, Clone, PartialEq)]
pub enum CompressedDataEventStream {
    /// The full value stored at the leaf.
    Full { data: SchemaValue },
    /// A partial update: only the field addressed by `key` changed.
    PatchChunk { key: String, data: SchemaValue },
    /// No payload accompanies this leaf event.
    Empty,
}

/// Event emitted through the SPL Noop program so that off-chain indexers can
/// reconstruct compressed account state from transaction logs.
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(AnchorDeserialize, AnchorSerialize, Clone, PartialEq)]
pub enum CompressedDataEvent {
    /// A leaf in a Merkle tree was written or updated.
    Leaf {
        slot: u64,
        tree_id: [u8; 32],
        leaf_idx: u32,
        seq: u64,
        stream_type: CompressedDataEventStream,
    },
    /// A Merkle tree was registered along with the schema of the values it
    /// stores in its leaves.
    TreeSchemaValue {
        program_id: [u8; 32],
        /// keccak256 hash of the owning program's id and the account name.
        discriminator: [u8; 32],
        tree_id: [u8; 32],
        canopy_depth: u8,
        schema: Schema,
    },
}

impl CompressedDataEvent {
    /// Serialize the event and emit it through the SPL Noop program, where
    /// indexers pick it up from the transaction's inner instructions.
    pub fn wrap<'info>(&self, noop: &Program<'info, Noop>) -> Result<()> {
        // Borsh serialization into an in-memory buffer is not expected to
        // fail for these types, so the `unwrap` is acceptable here.
        wrap_application_data_v1(self.try_to_vec().unwrap(), noop)
    }

    /// Build a [`CompressedDataEvent::TreeSchemaValue`] describing a Merkle
    /// tree and the schema of the values stored in its leaves.
    pub fn tree<'info>(
        max_depth: usize,
        max_buffer_size: usize,
        merkle_tree: &AccountInfo<'info>,
        schema: Schema,
        program_id: Pubkey,
        account_name: String,
    ) -> Result<Self> {
        let tree_size = merkle_tree.data_len();

        Ok(Self::TreeSchemaValue {
            program_id: program_id.to_bytes(),
            // Derive a discriminator from the owning program and the account
            // type name so indexers can tell what the leaves decode to.
            discriminator: keccak::hashv(&[program_id.as_ref(), account_name.as_bytes()])
                .to_bytes(),
            tree_id: merkle_tree.key().to_bytes(),
            // Infer the canopy depth from the space left in the account once
            // the header and the tree body (sized by `max_depth` and
            // `max_buffer_size`) are accounted for.
            canopy_depth: calculate_canopy_depth_header_v1(max_depth, max_buffer_size, tree_size),
            schema,
        })
    }
}
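
// ---------------------------------------------------------------------------
// Usage sketch (illustrative, not part of this module): how an Anchor
// instruction handler might emit a `Leaf` event after appending to the tree.
// The `Append` accounts struct, `leaf_idx`, and `seq` are hypothetical
// stand-ins; only `CompressedDataEvent`, `CompressedDataEventStream`, and
// `wrap` come from this module.
//
//     pub fn append_handler(ctx: Context<Append>, data: SchemaValue) -> Result<()> {
//         // ... append the hashed value to the tree, obtaining the new
//         // leaf index `leaf_idx` and the tree's sequence number `seq` ...
//
//         CompressedDataEvent::Leaf {
//             slot: Clock::get()?.slot,
//             tree_id: ctx.accounts.merkle_tree.key().to_bytes(),
//             leaf_idx,
//             seq,
//             stream_type: CompressedDataEventStream::Full { data },
//         }
//         .wrap(&ctx.accounts.log_wrapper)
//     }
// ---------------------------------------------------------------------------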