surrealdb_core/syn/lexer/compound/mod.rs

use crate::syn::{
	error::SyntaxError,
	lexer::Lexer,
	token::{Span, Token},
};

mod datetime;
mod ident;
mod js;
mod number;
mod regex;
mod strand;
mod uuid;

pub use datetime::{datetime, datetime_inner};
pub use ident::flexible_ident;
pub use js::javascript;
pub use number::{
	duration, float, integer, number, numeric, numeric_kind, NumberKind, Numeric, NumericKind,
};
pub use regex::regex;
pub use strand::strand;
pub use uuid::uuid;

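/// The result of lexing a compound token: the value produced by the compound
/// lexing function, together with the span covering the full compound token,
/// including the start token it was lexed from.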
#[derive(Debug)]
pub struct CompoundToken<T> {
	pub value: T,
	pub span: Span,
}

impl Lexer<'_> {
	/// Lex a more complex, compound token starting from the given start token.
	/// The start token must already have been consumed from the source.
	pub fn lex_compound<F, R>(
		&mut self,
		start: Token,
		f: F,
	) -> Result<CompoundToken<R>, SyntaxError>
	where
		F: Fn(&mut Self, Token) -> Result<R, SyntaxError>,
	{
		assert_eq!(
			self.last_offset,
			start.span.offset + start.span.len,
			"The start token given to compound was not the last token consumed."
		);

		// Rewind the last offset to the start of the given token so that the
		// span produced by `advance_span` below covers the start token as well
		// as everything the compound function consumes.
		self.last_offset = start.span.offset;

		let res = f(self, start)?;

		Ok(CompoundToken {
			value: res,
			span: self.advance_span(),
		})
	}
}
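
// A minimal usage sketch of `lex_compound`, assuming a caller that already has
// a `Lexer` and has pulled the start token from it. Only `lex_compound`,
// `CompoundToken`, and the re-exported `strand` function come from this module;
// the `next_token` entry point used below is an assumption made for
// illustration and may differ from the parser's actual lexer interface.
//
//     use crate::syn::error::SyntaxError;
//     use crate::syn::lexer::{compound, Lexer};
//
//     fn lex_strand_example(lexer: &mut Lexer<'_>) -> Result<(), SyntaxError> {
//         // Assumed entry point: pull the already-consumed start token
//         // (e.g. the opening quote of a string) from the lexer.
//         let start = lexer.next_token();
//
//         // Re-lex from the start token as a compound strand token. The
//         // function passed in must match `Fn(&mut Lexer, Token) -> Result<R, SyntaxError>`.
//         let compound::CompoundToken { value, span } =
//             lexer.lex_compound(start, compound::strand)?;
//
//         // `value` is whatever `compound::strand` produced, and `span` covers
//         // the start token plus everything consumed while lexing it.
//         let _ = (value, span);
//         Ok(())
//     }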