// rust_htslib/bam/pileup.rs

use std::fmt;
use std::iter;
use std::slice;

use crate::htslib;

use crate::bam;
use crate::bam::record;
use crate::errors::{Error, Result};

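/// Iterator over the `Alignment`s covering a pileup position.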
pub type Alignments<'a> = iter::Map<
    slice::Iter<'a, htslib::bam_pileup1_t>,
    fn(&'a htslib::bam_pileup1_t) -> Alignment<'a>,
>;

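/// A pileup at a single reference position, wrapping the htslib pileup
/// records (`bam_pileup1_t`) covering that position.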
#[derive(Debug)]
pub struct Pileup {
    inner: *const htslib::bam_pileup1_t,
    depth: u32,
    tid: u32,
    pos: u32,
}

impl Pileup {
    pub fn tid(&self) -> u32 {
        self.tid
    }

    pub fn pos(&self) -> u32 {
        self.pos
    }

    pub fn depth(&self) -> u32 {
        self.depth
    }

    pub fn alignments(&self) -> Alignments<'_> {
        self.inner().iter().map(Alignment::new)
    }

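    // View the raw htslib pileup records as a slice of length `depth`.
    // Safety: `inner` is expected to point to `depth` consecutive
    // `bam_pileup1_t` records owned by htslib for the lifetime of this `Pileup`.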
    fn inner(&self) -> &[htslib::bam_pileup1_t] {
        unsafe {
            slice::from_raw_parts(
                self.inner as *mut htslib::bam_pileup1_t,
                self.depth as usize,
            )
        }
    }
}

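/// An aligned read at a pileup position; a thin wrapper around htslib's
/// `bam_pileup1_t`.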
pub struct Alignment<'a> {
    inner: &'a htslib::bam_pileup1_t,
}

impl<'a> fmt::Debug for Alignment<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Alignment")
    }
}

impl<'a> Alignment<'a> {
    pub fn new(inner: &'a htslib::bam_pileup1_t) -> Self {
        Alignment { inner }
    }

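    /// Position of this pileup column within the read (query), or `None` if
    /// the read has a deletion or reference skip at this position.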
    pub fn qpos(&self) -> Option<usize> {
        if self.is_del() || self.is_refskip() {
            None
        } else {
            Some(self.inner.qpos as usize)
        }
    }

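    /// Insertion or deletion reported by htslib for this read at this
    /// position: a positive `indel` field maps to `Indel::Ins(len)`, a
    /// negative one to `Indel::Del(len)`, and zero to `Indel::None`.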
    pub fn indel(&self) -> Indel {
        match self.inner.indel {
            len if len < 0 => Indel::Del(-len as u32),
            len if len > 0 => Indel::Ins(len as u32),
            _ => Indel::None,
        }
    }

    pub fn is_del(&self) -> bool {
        self.inner.is_del() != 0
    }

    pub fn is_head(&self) -> bool {
        self.inner.is_head() != 0
    }

    pub fn is_tail(&self) -> bool {
        self.inner.is_tail() != 0
    }

    pub fn is_refskip(&self) -> bool {
        self.inner.is_refskip() != 0
    }

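    /// The `Record` this pileup entry refers to, built from the underlying
    /// htslib `bam1_t`.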
    pub fn record(&self) -> record::Record {
        record::Record::from_inner(self.inner.b)
    }
}

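/// An insertion or deletion (with its length) at a pileup position, or
/// `Indel::None` if there is no indel.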
#[derive(PartialEq, Eq, Debug, Copy, Clone, Hash)]
pub enum Indel {
    Ins(u32),
    Del(u32),
    None,
}

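/// Iterator over pileups, driven by htslib's `bam_plp_auto`.
///
/// A minimal usage sketch, assuming a BAM file at `test/test.bam` (the same
/// file used by the tests below):
///
/// ```no_run
/// use rust_htslib::bam::{self, Read};
///
/// let mut bam = bam::Reader::from_path("test/test.bam").unwrap();
/// for p in bam.pileup() {
///     let pileup = p.unwrap();
///     println!("{}:{} depth {}", pileup.tid(), pileup.pos(), pileup.depth());
/// }
/// ```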
#[derive(Debug)]
pub struct Pileups<'a, R: bam::Read> {
    #[allow(dead_code)]
    reader: &'a mut R,
    itr: htslib::bam_plp_t,
}

impl<'a, R: bam::Read> Pileups<'a, R> {
    pub fn new(reader: &'a mut R, itr: htslib::bam_plp_t) -> Self {
        Pileups { reader, itr }
    }

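    /// Set the maximum number of reads considered per position
    /// (htslib's `bam_plp_set_maxcnt`). Panics if `depth` exceeds `i32::MAX`.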
    pub fn set_max_depth(&mut self, depth: u32) {
        if depth > i32::max_value() as u32 {
            panic!(
                "Maximum value for pileup depth is {} but {} was provided",
                i32::max_value(),
                depth
            )
        }
        let intdepth = depth as i32;
        unsafe {
            htslib::bam_plp_set_maxcnt(self.itr, intdepth);
        }
    }
}

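// `bam_plp_auto` yields the records for the next pileup position. A NULL result
// with `depth == -1` signals a pileup error; a NULL result otherwise means the
// iterator is exhausted.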
impl<'a, R: bam::Read> Iterator for Pileups<'a, R> {
    type Item = Result<Pileup>;

    #[allow(clippy::match_bool)]
    fn next(&mut self) -> Option<Result<Pileup>> {
        let (mut tid, mut pos, mut depth) = (0i32, 0i32, 0i32);
        let inner = unsafe { htslib::bam_plp_auto(self.itr, &mut tid, &mut pos, &mut depth) };

        match inner.is_null() {
            true if depth == -1 => Some(Err(Error::BamPileup)),
            true => None,
            false => Some(Ok(Pileup {
                inner,
                depth: depth as u32,
                tid: tid as u32,
                pos: pos as u32,
            })),
        }
    }
}

impl<'a, R: bam::Read> Drop for Pileups<'a, R> {
    fn drop(&mut self) {
        unsafe {
            htslib::bam_plp_reset(self.itr);
            htslib::bam_plp_destroy(self.itr);
        }
    }
}

#[cfg(test)]
mod tests {

    use crate::bam;
    use crate::bam::Read;

    #[test]
    fn test_max_pileup() {
        let mut bam = bam::Reader::from_path(&"test/test.bam").unwrap();
        let mut p = bam.pileup();
        p.set_max_depth(0u32);
        p.set_max_depth(800u32);
    }

    #[test]
    #[should_panic]
    fn test_max_pileup_too_high() {
        let mut bam = bam::Reader::from_path(&"test/test.bam").unwrap();
        let mut p = bam.pileup();
        p.set_max_depth((i32::max_value() as u32) + 1);
    }
}
207}