ed_journals/modules/logs/asynchronous/
log_file_reader.rs

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
use std::collections::VecDeque;
use std::io;
use std::io::SeekFrom;
use std::path::Path;

use thiserror::Error;
use tokio::fs::File;
use tokio::io::AsyncReadExt;
use tokio::io::AsyncSeekExt;

use crate::logs::content::LogEvent;

#[derive(Debug)]
pub struct LogFileReader {
    // Handle to the log file being tailed.
    source: File,
    // Byte offset into `source` where the next read should start. Advanced by the
    // number of bytes read; nudged back one byte when the last read ended on '\n'
    // (see `read_next`).
    position: usize,
    // Raw text read from the file but not yet fully parsed. May hold a trailing
    // partial line, kept here so a future append can complete it.
    file_read_buffer: String,
    // Parsed events (or per-line parse errors) waiting to be handed out by `next`.
    entry_buffer: VecDeque<Result<LogEvent, LogFileReaderError>>,
    // Poison flag: set after an I/O error so `next` stops yielding entries.
    failing: bool,
}

/// Errors that can occur while opening or reading a log file.
#[derive(Debug, Error)]
pub enum LogFileReaderError {
    /// An underlying filesystem/read error (opening, seeking, or reading the file).
    #[error(transparent)]
    IO(#[from] io::Error),

    /// A line was read completely but was not valid JSON for a log event.
    #[error("Failed to parse log line: {0}")]
    FailedToParseLine(#[from] serde_json::Error),
}

impl LogFileReader {
    /// Opens the log file at `path`, positioned at the start of the file.
    ///
    /// # Errors
    /// Returns [`LogFileReaderError::IO`] when the file cannot be opened.
    pub async fn open<P: AsRef<Path>>(path: P) -> Result<Self, LogFileReaderError> {
        Ok(LogFileReader {
            source: File::open(path).await?,
            position: 0,
            file_read_buffer: String::new(),
            entry_buffer: VecDeque::new(),
            failing: false,
        })
    }

    /// Reads newly appended file content from `self.position` onward and parses
    /// each complete line into `entry_buffer`.
    ///
    /// A trailing line that fails to parse is assumed to be incomplete (the
    /// writer may still be appending to it) and is retained in
    /// `file_read_buffer` rather than reported as an error.
    ///
    /// # Errors
    /// Returns [`LogFileReaderError::IO`] when seeking or reading fails
    /// (including invalid UTF-8 via `read_to_string`).
    async fn read_next(&mut self) -> Result<(), LogFileReaderError> {
        self.source
            .seek(SeekFrom::Start(self.position as u64))
            .await?;
        // `read_to_string` appends after any retained partial line, so an
        // incomplete line from the previous read is joined with its remainder.
        self.position += self
            .source
            .read_to_string(&mut self.file_read_buffer)
            .await?;

        // Step back one byte so the next read starts on the final '\n'. This
        // keeps a retained (malformed-but-complete) last line separated from
        // newly appended text on the next read; the resulting empty first line
        // is filtered out below.
        if self.file_read_buffer.ends_with('\n') {
            self.position -= 1;
        }

        let mut lines = self
            .file_read_buffer
            .lines()
            .filter(|line| !line.trim().is_empty())
            .peekable();

        while let Some(line) = lines.next() {
            // Journal files are sometimes zero-padded; strip NUL bytes before parsing.
            let parse_result = serde_json::from_str(line.trim_matches('\0'));

            // If the line didn't parse, but the line is the last line that was read, it will not
            // error and instead add the current line back into the read buffer to hopefully be
            // completed when new lines are added.
            if parse_result.is_err() && lines.peek().is_none() {
                self.file_read_buffer = line.to_string();
                return Ok(());
            }

            self.entry_buffer
                .push_back(parse_result.map_err(|e| e.into()));
        }

        // The whole read buffer has been processed; clear it in place so its
        // allocation is reused by the next read instead of reallocating.
        self.file_read_buffer.clear();

        Ok(())
    }

    /// Returns the next log event, or `None` when no (more) events are
    /// currently available or a previous read failed.
    ///
    /// Already-parsed entries are drained before touching the file again. This
    /// avoids a redundant seek+read on every call and ensures that events
    /// parsed successfully are not discarded when a later read hits an I/O
    /// error.
    pub async fn next(&mut self) -> Option<Result<LogEvent, LogFileReaderError>> {
        // After an I/O error the reader is permanently poisoned.
        if self.failing {
            return None;
        }

        if let Some(entry) = self.entry_buffer.pop_front() {
            return Some(entry);
        }

        if let Err(error) = self.read_next().await {
            self.failing = true;
            return Some(Err(error));
        }

        self.entry_buffer.pop_front()
    }
}