1use crate::buffer_queue::BufferQueue;
13use crate::tokenizer::{Tokenizer, TokenizerOpts, TokenizerResult};
14use crate::tree_builder::{create_element, TreeBuilder, TreeBuilderOpts, TreeSink};
15use crate::{Attribute, QualName};
16
17use std::borrow::Cow;
18
19use crate::tendril;
20use crate::tendril::stream::{TendrilSink, Utf8LossyDecoder};
21use crate::tendril::StrTendril;
22
/// Options bundle for constructing a parser: one set of options for each
/// stage of the pipeline (tokenizer and tree builder).
#[derive(Clone, Default)]
pub struct ParseOpts {
    /// Options passed through to the [`Tokenizer`].
    pub tokenizer: TokenizerOpts,

    /// Options passed through to the [`TreeBuilder`].
    pub tree_builder: TreeBuilderOpts,
}
32
33pub fn parse_document<Sink>(sink: Sink, opts: ParseOpts) -> Parser<Sink>
41where
42 Sink: TreeSink,
43{
44 let tb = TreeBuilder::new(sink, opts.tree_builder);
45 let tok = Tokenizer::new(tb, opts.tokenizer);
46 Parser {
47 tokenizer: tok,
48 input_buffer: BufferQueue::default(),
49 }
50}
51
52pub fn parse_fragment<Sink>(
60 sink: Sink,
61 opts: ParseOpts,
62 context_name: QualName,
63 context_attrs: Vec<Attribute>,
64) -> Parser<Sink>
65where
66 Sink: TreeSink,
67{
68 let context_elem = create_element(&sink, context_name, context_attrs);
69 parse_fragment_for_element(sink, opts, context_elem, None)
70}
71
72pub fn parse_fragment_for_element<Sink>(
75 sink: Sink,
76 opts: ParseOpts,
77 context_element: Sink::Handle,
78 form_element: Option<Sink::Handle>,
79) -> Parser<Sink>
80where
81 Sink: TreeSink,
82{
83 let tb = TreeBuilder::new_for_fragment(sink, context_element, form_element, opts.tree_builder);
84 let tok_opts = TokenizerOpts {
85 initial_state: Some(tb.tokenizer_state_for_context_elem()),
86 ..opts.tokenizer
87 };
88 let tok = Tokenizer::new(tb, tok_opts);
89 Parser {
90 tokenizer: tok,
91 input_buffer: BufferQueue::default(),
92 }
93}
94
/// An HTML parser: a [`Tokenizer`] feeding a [`TreeBuilder`] feeding a
/// user-supplied `Sink`, driven through the [`TendrilSink`] interface.
pub struct Parser<Sink>
where
    Sink: TreeSink,
{
    /// The tokenizer, whose sink is the tree builder wrapping the user sink.
    pub tokenizer: Tokenizer<TreeBuilder<Sink::Handle, Sink>>,
    /// Buffered input not yet consumed by the tokenizer.
    pub input_buffer: BufferQueue,
}
104
105impl<Sink: TreeSink> TendrilSink<tendril::fmt::UTF8> for Parser<Sink> {
106 fn process(&mut self, t: StrTendril) {
107 self.input_buffer.push_back(t);
108 while let TokenizerResult::Script(_) = self.tokenizer.feed(&self.input_buffer) {}
110 }
111
112 fn error(&mut self, desc: Cow<'static, str>) {
114 self.tokenizer.sink.sink.parse_error(desc)
115 }
116
117 type Output = Sink::Output;
118
119 fn finish(self) -> Self::Output {
120 while let TokenizerResult::Script(_) = self.tokenizer.feed(&self.input_buffer) {}
122 assert!(self.input_buffer.is_empty());
123 self.tokenizer.end();
124 self.tokenizer.sink.sink.finish()
125 }
126}
127
impl<Sink: TreeSink> Parser<Sink> {
    /// Wrap this parser in a [`Utf8LossyDecoder`] so it can accept raw byte
    /// input, decoding it lossily as UTF-8 before tokenization.
    //
    // `from_*` normally implies a conversion constructor taking `Self` as an
    // argument; here it consumes `self` instead, hence the clippy allow.
    #[allow(clippy::wrong_self_convention)]
    pub fn from_utf8(self) -> Utf8LossyDecoder<Self> {
        Utf8LossyDecoder::new(self)
    }
}