use std::fmt;

// The type of the parser's input.
//
// The parser iterates over tuples consisting of the token's starting
// position, the token itself, and the token's ending position.
pub(crate) type LexerItem<Tok, Loc, Error>
    = ::std::result::Result<(Loc, Tok, Loc), Error>;
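
// For example, with the `Lexer` below, the token stream
// `[Token::Literal]` is yielded as the single item
// `Ok((0, Token::Literal, 0))`: locations are indices into the token
// slice, and because each token occupies exactly one slot, its
// starting and ending positions coincide.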

/// The components of an OpenPGP Message.
///
/// Note: This enum cannot be exhaustively matched to allow future
/// extensions.
#[non_exhaustive]
#[derive(Debug, Clone, Copy, PartialEq)]
#[deprecated(since = "1.9.0",
             note = "Not covered by SemVer guarantees, DO NOT match on it.")]
pub enum Token {
    /// A Literal data packet.
    Literal,
    /// A Compressed Data packet.
    CompressedData,
    /// An SK-ESK packet.
    SKESK,
    /// A PK-ESK packet.
    PKESK,
    /// A version 1 SEIP packet.
    SEIPv1,
    /// An MDC packet.
    MDC,
    /// An AED packet.
    AED,
    /// A OnePassSig packet.
    OPS,
    /// A Signature packet.
    SIG,
    /// The end of a container (either a Compressed Data packet or a
    /// SEIP packet).
    Pop,
    /// A container's unparsed content.
    OpaqueContent,
}

assert_send_and_sync!(Token);
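
// As an illustration, a one-pass signed literal message corresponds
// to the token sequence `[Token::OPS, Token::Literal, Token::SIG]`;
// see `Lexer::from_tokens` below for how such a sequence is fed to
// the parser.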

impl fmt::Display for Token {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}

#[derive(Debug, Clone)]
pub enum LexicalError {
    // There are no lexing errors: this enum is uninhabited, so the
    // `Err` variant of `LexerItem` can never be constructed.
}
assert_send_and_sync!(LexicalError);

impl fmt::Display for LexicalError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}

pub(crate) struct Lexer<'input> {
    iter: Box<dyn Iterator<Item = (usize, &'input Token)> + 'input>,
}

impl<'input> Iterator for Lexer<'input> {
    type Item = LexerItem<Token, usize, LexicalError>;

    fn next(&mut self) -> Option<Self::Item> {
        // Each token occupies exactly one position in the input, so a
        // token's starting and ending locations coincide.
        self.iter.next().map(|(pos, tok)| Ok((pos, *tok, pos)))
    }
}

impl<'input> Lexer<'input> {
    /// Uses a raw sequence of tokens as input to the parser.
    pub(crate) fn from_tokens(raw: &'input [Token]) -> Self {
        Lexer {
            iter: Box::new(raw.iter().enumerate()),
        }
    }
}
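
// A minimal usage sketch: feeding a raw token sequence to the lexer
// and collecting the spanned items it yields. The test below is a
// hypothetical illustration of the API, not part of the parser.
#[cfg(test)]
#[allow(deprecated)]
mod tests {
    use super::*;

    #[test]
    fn from_tokens_yields_spanned_items() {
        let tokens = [Token::OPS, Token::Literal, Token::SIG];
        let items: Vec<_> = Lexer::from_tokens(&tokens)
            // Lexing is infallible: `LexicalError` is uninhabited.
            .map(|item| item.expect("no lexing errors"))
            .collect();
        assert_eq!(items,
                   vec![(0, Token::OPS, 0),
                        (1, Token::Literal, 1),
                        (2, Token::SIG, 2)]);
    }
}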