wit_parser/
ast.rs

1use crate::{Error, PackageNotFoundError, UnresolvedPackageGroup};
2use anyhow::{bail, Context, Result};
3use lex::{Span, Token, Tokenizer};
4use semver::Version;
5use std::borrow::Cow;
6use std::fmt;
7use std::mem;
8use std::path::{Path, PathBuf};
9
10pub mod lex;
11
12pub use resolve::Resolver;
13mod resolve;
14pub mod toposort;
15
16pub use lex::validate_id;
17
/// Representation of a single WIT `*.wit` file and nested packages.
struct PackageFile<'a> {
    /// Optional `package foo:bar;` header
    package_id: Option<PackageName<'a>>,
    /// Other AST items (interfaces, worlds, uses, and nested packages).
    decl_list: DeclList<'a>,
}
25
26impl<'a> PackageFile<'a> {
27    /// Parse a standalone file represented by `tokens`.
28    ///
29    /// This will optionally start with `package foo:bar;` and then will have a
30    /// list of ast items after it.
31    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
32        let mut package_name_tokens_peek = tokens.clone();
33        let docs = parse_docs(&mut package_name_tokens_peek)?;
34
35        // Parse `package foo:bar;` but throw it out if it's actually
36        // `package foo:bar { ... }` since that's an ast item instead.
37        let package_id = if package_name_tokens_peek.eat(Token::Package)? {
38            let name = PackageName::parse(&mut package_name_tokens_peek, docs)?;
39            if package_name_tokens_peek.eat(Token::Semicolon)? {
40                *tokens = package_name_tokens_peek;
41                Some(name)
42            } else {
43                None
44            }
45        } else {
46            None
47        };
48        let decl_list = DeclList::parse_until(tokens, None)?;
49        Ok(PackageFile {
50            package_id,
51            decl_list,
52        })
53    }
54
55    /// Parse a nested package of the form `package foo:bar { ... }`
56    fn parse_nested(
57        tokens: &mut Tokenizer<'a>,
58        docs: Docs<'a>,
59        attributes: Vec<Attribute<'a>>,
60    ) -> Result<Self> {
61        let span = tokens.expect(Token::Package)?;
62        if !attributes.is_empty() {
63            bail!(Error::new(
64                span,
65                format!("cannot place attributes on nested packages"),
66            ));
67        }
68        let package_id = PackageName::parse(tokens, docs)?;
69        tokens.expect(Token::LeftBrace)?;
70        let decl_list = DeclList::parse_until(tokens, Some(Token::RightBrace))?;
71        Ok(PackageFile {
72            package_id: Some(package_id),
73            decl_list,
74        })
75    }
76}
77
/// Stores all of the declarations in a package's scope. In AST terms, this
/// means everything except the `package` declaration that demarcates a package
/// scope. In the traditional implicit format, these are all of the
/// non-`package` declarations in the file:
///
/// ```wit
/// package foo:name;
///
/// /* START DECL LIST */
/// // Some comment...
/// interface i {}
/// world w {}
/// /* END DECL LIST */
/// ```
///
/// In the nested package style, a [`DeclList`] is everything inside of each
/// `package` element's brackets:
///
/// ```wit
/// package foo:name {
///   /* START FIRST DECL LIST */
///   // Some comment...
///   interface i {}
///   world w {}
///   /* END FIRST DECL LIST */
/// }
///
/// package bar:name {
///   /* START SECOND DECL LIST */
///   // Some comment...
///   interface i {}
///   world w {}
///   /* END SECOND DECL LIST */
/// }
/// ```
#[derive(Default)]
pub struct DeclList<'a> {
    /// The declarations, in source order.
    items: Vec<AstItem<'a>>,
}
117
impl<'a> DeclList<'a> {
    /// Parses declarations from `tokens` until `end` is eaten, or until EOF
    /// when `end` is `None`.
    ///
    /// Doc comments are parsed ahead of each item (and before the terminator
    /// check) so that comments trailing the final item are consumed.
    fn parse_until(tokens: &mut Tokenizer<'a>, end: Option<Token>) -> Result<DeclList<'a>> {
        let mut items = Vec::new();
        let mut docs = parse_docs(tokens)?;
        loop {
            match end {
                Some(end) => {
                    if tokens.eat(end)? {
                        break;
                    }
                }
                None => {
                    // No explicit terminator: stop at end-of-input, peeking
                    // via a cloned tokenizer so nothing is consumed.
                    if tokens.clone().next()?.is_none() {
                        break;
                    }
                }
            }
            items.push(AstItem::parse(tokens, docs)?);
            docs = parse_docs(tokens)?;
        }
        Ok(DeclList { items })
    }

    /// Invokes `f` for every `use`-like path referenced by this declaration
    /// list, recursing into nested packages.
    ///
    /// The callback receives: the enclosing item's name (if any), the
    /// attributes on the referencing item, the path itself, the imported
    /// names (for `use` items), and whether the path is expected to resolve
    /// to a world or an interface.
    fn for_each_path<'b>(
        &'b self,
        f: &mut dyn FnMut(
            Option<&'b Id<'a>>,
            &'b [Attribute<'a>],
            &'b UsePath<'a>,
            Option<&'b [UseName<'a>]>,
            WorldOrInterface,
        ) -> Result<()>,
    ) -> Result<()> {
        for item in self.items.iter() {
            match item {
                AstItem::World(world) => {
                    // Visit imports here first before exports to help preserve
                    // round-tripping of documents because printing a world puts
                    // imports first but textually they can be listed with
                    // exports first.
                    let mut imports = Vec::new();
                    let mut exports = Vec::new();
                    for item in world.items.iter() {
                        match item {
                            WorldItem::Use(u) => f(
                                None,
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            WorldItem::Include(i) => f(
                                Some(&world.name),
                                &i.attributes,
                                &i.from,
                                None,
                                WorldOrInterface::World,
                            )?,
                            WorldItem::Type(_) => {}
                            WorldItem::Import(Import {
                                kind, attributes, ..
                            }) => imports.push((kind, attributes)),
                            WorldItem::Export(Export {
                                kind, attributes, ..
                            }) => exports.push((kind, attributes)),
                        }
                    }

                    // Shared visitor for import/export payloads: inline
                    // interfaces are walked for their `use`s, path references
                    // are reported directly, and functions carry no paths.
                    let mut visit_kind =
                        |kind: &'b ExternKind<'a>, attrs: &'b [Attribute<'a>]| match kind {
                            ExternKind::Interface(_, items) => {
                                for item in items {
                                    match item {
                                        InterfaceItem::Use(u) => f(
                                            None,
                                            &u.attributes,
                                            &u.from,
                                            Some(&u.names),
                                            WorldOrInterface::Interface,
                                        )?,
                                        _ => {}
                                    }
                                }
                                Ok(())
                            }
                            ExternKind::Path(path) => {
                                f(None, attrs, path, None, WorldOrInterface::Interface)
                            }
                            ExternKind::Func(..) => Ok(()),
                        };

                    for (kind, attrs) in imports {
                        visit_kind(kind, attrs)?;
                    }
                    for (kind, attrs) in exports {
                        visit_kind(kind, attrs)?;
                    }
                }
                AstItem::Interface(i) => {
                    for item in i.items.iter() {
                        match item {
                            InterfaceItem::Use(u) => f(
                                Some(&i.name),
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            _ => {}
                        }
                    }
                }
                AstItem::Use(u) => {
                    // At the top-level, we don't know if this is a world or an
                    // interface. It is up to the resolver to decide how to
                    // handle this ambiguity.
                    f(
                        None,
                        &u.attributes,
                        &u.item,
                        None,
                        WorldOrInterface::Unknown,
                    )?;
                }

                AstItem::Package(pkg) => pkg.decl_list.for_each_path(f)?,
            }
        }
        Ok(())
    }
}
248
/// A single top-level item within a package's declaration list.
enum AstItem<'a> {
    /// An `interface name { ... }` definition.
    Interface(Interface<'a>),
    /// A `world name { ... }` definition.
    World(World<'a>),
    /// A top-level `use ...;` statement.
    Use(ToplevelUse<'a>),
    /// A nested `package foo:bar { ... }` definition.
    Package(PackageFile<'a>),
}
255
256impl<'a> AstItem<'a> {
257    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
258        let attributes = Attribute::parse_list(tokens)?;
259        match tokens.clone().next()? {
260            Some((_span, Token::Interface)) => {
261                Interface::parse(tokens, docs, attributes).map(Self::Interface)
262            }
263            Some((_span, Token::World)) => World::parse(tokens, docs, attributes).map(Self::World),
264            Some((_span, Token::Use)) => ToplevelUse::parse(tokens, attributes).map(Self::Use),
265            Some((_span, Token::Package)) => {
266                PackageFile::parse_nested(tokens, docs, attributes).map(Self::Package)
267            }
268            other => Err(err_expected(tokens, "`world`, `interface` or `use`", other).into()),
269        }
270    }
271}
272
/// An AST-level package name such as `foo:bar@1.0.0`.
#[derive(Debug, Clone)]
struct PackageName<'a> {
    /// Doc comments attached to the `package` declaration.
    docs: Docs<'a>,
    /// Span covering `namespace:name[@version]`.
    span: Span,
    /// The `foo` in `foo:bar`.
    namespace: Id<'a>,
    /// The `bar` in `foo:bar`.
    name: Id<'a>,
    /// Optional `@x.y.z` version suffix and its span.
    version: Option<(Span, Version)>,
}
281
282impl<'a> PackageName<'a> {
283    fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
284        let namespace = parse_id(tokens)?;
285        tokens.expect(Token::Colon)?;
286        let name = parse_id(tokens)?;
287        let version = parse_opt_version(tokens)?;
288        Ok(PackageName {
289            docs,
290            span: Span {
291                start: namespace.span.start,
292                end: version
293                    .as_ref()
294                    .map(|(s, _)| s.end)
295                    .unwrap_or(name.span.end),
296            },
297            namespace,
298            name,
299            version,
300        })
301    }
302
303    fn package_name(&self) -> crate::PackageName {
304        crate::PackageName {
305            namespace: self.namespace.name.to_string(),
306            name: self.name.name.to_string(),
307            version: self.version.as_ref().map(|(_, v)| v.clone()),
308        }
309    }
310}
311
/// A top-level `use foo:bar/baz [as quux];` statement.
struct ToplevelUse<'a> {
    /// Span of the `use` keyword.
    span: Span,
    /// Attributes preceding the statement.
    attributes: Vec<Attribute<'a>>,
    /// The path being used.
    item: UsePath<'a>,
    /// Optional `as` rename.
    as_: Option<Id<'a>>,
}
318
319impl<'a> ToplevelUse<'a> {
320    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
321        let span = tokens.expect(Token::Use)?;
322        let item = UsePath::parse(tokens)?;
323        let as_ = if tokens.eat(Token::As)? {
324            Some(parse_id(tokens)?)
325        } else {
326            None
327        };
328        tokens.expect_semicolon()?;
329        Ok(ToplevelUse {
330            span,
331            attributes,
332            item,
333            as_,
334        })
335    }
336}
337
/// A `world name { ... }` definition.
struct World<'a> {
    /// Doc comments preceding the world.
    docs: Docs<'a>,
    /// Attributes preceding the world.
    attributes: Vec<Attribute<'a>>,
    /// The world's name.
    name: Id<'a>,
    /// Items contained within the braces.
    items: Vec<WorldItem<'a>>,
}
344
345impl<'a> World<'a> {
346    fn parse(
347        tokens: &mut Tokenizer<'a>,
348        docs: Docs<'a>,
349        attributes: Vec<Attribute<'a>>,
350    ) -> Result<Self> {
351        tokens.expect(Token::World)?;
352        let name = parse_id(tokens)?;
353        let items = Self::parse_items(tokens)?;
354        Ok(World {
355            docs,
356            attributes,
357            name,
358            items,
359        })
360    }
361
362    fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<WorldItem<'a>>> {
363        tokens.expect(Token::LeftBrace)?;
364        let mut items = Vec::new();
365        loop {
366            let docs = parse_docs(tokens)?;
367            if tokens.eat(Token::RightBrace)? {
368                break;
369            }
370            let attributes = Attribute::parse_list(tokens)?;
371            items.push(WorldItem::parse(tokens, docs, attributes)?);
372        }
373        Ok(items)
374    }
375}
376
/// A single item within a `world` body.
enum WorldItem<'a> {
    /// `import ...`
    Import(Import<'a>),
    /// `export ...`
    Export(Export<'a>),
    /// `use path.{names};`
    Use(Use<'a>),
    /// An inline type definition.
    Type(TypeDef<'a>),
    /// `include path [with { ... }];`
    Include(Include<'a>),
}
384
impl<'a> WorldItem<'a> {
    /// Parses a single item within a `world`, dispatching on the next token.
    ///
    /// The next token is peeked via a cloned tokenizer; each delegated parser
    /// re-consumes its own leading keyword.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<WorldItem<'a>> {
        match tokens.clone().next()? {
            Some((_span, Token::Import)) => {
                Import::parse(tokens, docs, attributes).map(WorldItem::Import)
            }
            Some((_span, Token::Export)) => {
                Export::parse(tokens, docs, attributes).map(WorldItem::Export)
            }
            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(WorldItem::Use),
            Some((_span, Token::Type)) => {
                TypeDef::parse(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Flags)) => {
                TypeDef::parse_flags(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Resource)) => {
                TypeDef::parse_resource(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Record)) => {
                TypeDef::parse_record(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Variant)) => {
                TypeDef::parse_variant(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Enum)) => {
                TypeDef::parse_enum(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Include)) => {
                Include::parse(tokens, attributes).map(WorldItem::Include)
            }
            other => Err(err_expected(
                tokens,
                "`import`, `export`, `include`, `use`, or type definition",
                other,
            )
            .into()),
        }
    }
}
429
/// An `import ...` item within a world.
struct Import<'a> {
    /// Doc comments preceding the import.
    docs: Docs<'a>,
    /// Attributes preceding the import.
    attributes: Vec<Attribute<'a>>,
    /// What is imported: a function, inline interface, or path.
    kind: ExternKind<'a>,
}
435
436impl<'a> Import<'a> {
437    fn parse(
438        tokens: &mut Tokenizer<'a>,
439        docs: Docs<'a>,
440        attributes: Vec<Attribute<'a>>,
441    ) -> Result<Import<'a>> {
442        tokens.expect(Token::Import)?;
443        let kind = ExternKind::parse(tokens)?;
444        Ok(Import {
445            docs,
446            attributes,
447            kind,
448        })
449    }
450}
451
/// An `export ...` item within a world.
struct Export<'a> {
    /// Doc comments preceding the export.
    docs: Docs<'a>,
    /// Attributes preceding the export.
    attributes: Vec<Attribute<'a>>,
    /// What is exported: a function, inline interface, or path.
    kind: ExternKind<'a>,
}
457
458impl<'a> Export<'a> {
459    fn parse(
460        tokens: &mut Tokenizer<'a>,
461        docs: Docs<'a>,
462        attributes: Vec<Attribute<'a>>,
463    ) -> Result<Export<'a>> {
464        tokens.expect(Token::Export)?;
465        let kind = ExternKind::parse(tokens)?;
466        Ok(Export {
467            docs,
468            attributes,
469            kind,
470        })
471    }
472}
473
/// The payload of an `import` or `export` within a world.
enum ExternKind<'a> {
    /// An inline interface: `import foo: interface { ... }`.
    Interface(Id<'a>, Vec<InterfaceItem<'a>>),
    /// A reference to a named or fully-qualified interface.
    Path(UsePath<'a>),
    /// A function: `import foo: func(...)`.
    Func(Id<'a>, Func<'a>),
}
479
impl<'a> ExternKind<'a> {
    /// Parses the payload following `import`/`export`, distinguishing
    /// `name: func(...)`, `name: interface { ... }`, and plain paths.
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<ExternKind<'a>> {
        // Create a copy of the token stream to test out if this is a function
        // or an interface import. In those situations the token stream gets
        // reset to the state of the clone and we continue down those paths.
        //
        // If neither a function nor an interface appears here though then the
        // clone is thrown away and the original token stream is parsed for an
        // interface. This will redo the original ID parse and the original
        // colon parse, but that shouldn't be too too bad perf-wise.
        let mut clone = tokens.clone();
        let id = parse_id(&mut clone)?;
        if clone.eat(Token::Colon)? {
            // import foo: func(...)
            if clone.clone().eat(Token::Func)? {
                *tokens = clone;
                let ret = ExternKind::Func(id, Func::parse(tokens)?);
                tokens.expect_semicolon()?;
                return Ok(ret);
            }

            // import foo: interface { ... }
            if clone.eat(Token::Interface)? {
                *tokens = clone;
                return Ok(ExternKind::Interface(id, Interface::parse_items(tokens)?));
            }
        }

        // import foo
        // import foo/bar
        // import foo:bar/baz
        let ret = ExternKind::Path(UsePath::parse(tokens)?);
        tokens.expect_semicolon()?;
        Ok(ret)
    }

    /// Returns the span of the name (or path name) of this extern.
    fn span(&self) -> Span {
        match self {
            ExternKind::Interface(id, _) => id.span,
            ExternKind::Path(UsePath::Id(id)) => id.span,
            ExternKind::Path(UsePath::Package { name, .. }) => name.span,
            ExternKind::Func(id, _) => id.span,
        }
    }
}
525
/// An `interface name { ... }` definition.
struct Interface<'a> {
    /// Doc comments preceding the interface.
    docs: Docs<'a>,
    /// Attributes preceding the interface.
    attributes: Vec<Attribute<'a>>,
    /// The interface's name.
    name: Id<'a>,
    /// Items contained within the braces.
    items: Vec<InterfaceItem<'a>>,
}
532
533impl<'a> Interface<'a> {
534    fn parse(
535        tokens: &mut Tokenizer<'a>,
536        docs: Docs<'a>,
537        attributes: Vec<Attribute<'a>>,
538    ) -> Result<Self> {
539        tokens.expect(Token::Interface)?;
540        let name = parse_id(tokens)?;
541        let items = Self::parse_items(tokens)?;
542        Ok(Interface {
543            docs,
544            attributes,
545            name,
546            items,
547        })
548    }
549
550    pub(super) fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<InterfaceItem<'a>>> {
551        tokens.expect(Token::LeftBrace)?;
552        let mut items = Vec::new();
553        loop {
554            let docs = parse_docs(tokens)?;
555            if tokens.eat(Token::RightBrace)? {
556                break;
557            }
558            let attributes = Attribute::parse_list(tokens)?;
559            items.push(InterfaceItem::parse(tokens, docs, attributes)?);
560        }
561        Ok(items)
562    }
563}
564
/// Whether a path is expected to resolve to a world, an interface, or is not
/// yet known (e.g. a top-level `use`, resolved later).
#[derive(Debug)]
pub enum WorldOrInterface {
    World,
    Interface,
    Unknown,
}
571
/// A single item within an `interface` body.
enum InterfaceItem<'a> {
    /// A type definition (`type`, `record`, `flags`, `enum`, ...).
    TypeDef(TypeDef<'a>),
    /// A named function declaration.
    Func(NamedFunc<'a>),
    /// A `use path.{names};` statement.
    Use(Use<'a>),
}

/// A `use path.{a, b as c};` statement within an interface or world.
struct Use<'a> {
    /// Attributes preceding the statement.
    attributes: Vec<Attribute<'a>>,
    /// The interface the names are imported from.
    from: UsePath<'a>,
    /// The names imported, each with an optional rename.
    names: Vec<UseName<'a>>,
}
583
/// The target of a `use` statement or interface reference.
#[derive(Debug)]
enum UsePath<'a> {
    /// A bare identifier referring to an item in the current package.
    Id(Id<'a>),
    /// A fully-qualified `namespace:pkg/name[@version]` path.
    Package { id: PackageName<'a>, name: Id<'a> },
}
589
590impl<'a> UsePath<'a> {
591    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
592        let id = parse_id(tokens)?;
593        if tokens.eat(Token::Colon)? {
594            // `foo:bar/baz@1.0`
595            let namespace = id;
596            let pkg_name = parse_id(tokens)?;
597            tokens.expect(Token::Slash)?;
598            let name = parse_id(tokens)?;
599            let version = parse_opt_version(tokens)?;
600            Ok(UsePath::Package {
601                id: PackageName {
602                    docs: Default::default(),
603                    span: Span {
604                        start: namespace.span.start,
605                        end: pkg_name.span.end,
606                    },
607                    namespace,
608                    name: pkg_name,
609                    version,
610                },
611                name,
612            })
613        } else {
614            // `foo`
615            Ok(UsePath::Id(id))
616        }
617    }
618
619    fn name(&self) -> &Id<'a> {
620        match self {
621            UsePath::Id(id) => id,
622            UsePath::Package { name, .. } => name,
623        }
624    }
625}
626
/// A single imported name within `use path.{...}`.
struct UseName<'a> {
    /// The original name in the source interface.
    name: Id<'a>,
    /// Optional `as` rename.
    as_: Option<Id<'a>>,
}
631
632impl<'a> Use<'a> {
633    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
634        tokens.expect(Token::Use)?;
635        let from = UsePath::parse(tokens)?;
636        tokens.expect(Token::Period)?;
637        tokens.expect(Token::LeftBrace)?;
638
639        let mut names = Vec::new();
640        while !tokens.eat(Token::RightBrace)? {
641            let mut name = UseName {
642                name: parse_id(tokens)?,
643                as_: None,
644            };
645            if tokens.eat(Token::As)? {
646                name.as_ = Some(parse_id(tokens)?);
647            }
648            names.push(name);
649            if !tokens.eat(Token::Comma)? {
650                tokens.expect(Token::RightBrace)?;
651                break;
652            }
653        }
654        tokens.expect_semicolon()?;
655        Ok(Use {
656            attributes,
657            from,
658            names,
659        })
660    }
661}
662
/// An `include path [with { a as b, ... }];` item within a world.
struct Include<'a> {
    /// The world being included.
    from: UsePath<'a>,
    /// Attributes preceding the include.
    attributes: Vec<Attribute<'a>>,
    /// Renames listed in the optional `with` block.
    names: Vec<IncludeName<'a>>,
}

/// A single `name as other` rename within an `include ... with { }` block.
struct IncludeName<'a> {
    /// The name in the included world.
    name: Id<'a>,
    /// The new name in the including world.
    as_: Id<'a>,
}
673
674impl<'a> Include<'a> {
675    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
676        tokens.expect(Token::Include)?;
677        let from = UsePath::parse(tokens)?;
678
679        let names = if tokens.eat(Token::With)? {
680            parse_list(
681                tokens,
682                Token::LeftBrace,
683                Token::RightBrace,
684                |_docs, tokens| {
685                    let name = parse_id(tokens)?;
686                    tokens.expect(Token::As)?;
687                    let as_ = parse_id(tokens)?;
688                    Ok(IncludeName { name, as_ })
689                },
690            )?
691        } else {
692            tokens.expect_semicolon()?;
693            Vec::new()
694        };
695
696        Ok(Include {
697            attributes,
698            from,
699            names,
700        })
701    }
702}
703
/// An identifier along with the span of source text it was found at.
#[derive(Debug, Clone)]
pub struct Id<'a> {
    /// The identifier text, borrowed from the source.
    name: &'a str,
    /// Where `name` appeared in the source.
    span: Span,
}
709
710impl<'a> From<&'a str> for Id<'a> {
711    fn from(s: &'a str) -> Id<'a> {
712        Id {
713            name: s.into(),
714            span: Span { start: 0, end: 0 },
715        }
716    }
717}
718
/// Doc comments collected ahead of an item.
#[derive(Debug, Clone)]
pub struct Docs<'a> {
    /// Each doc-comment line, borrowed from the source where possible.
    docs: Vec<Cow<'a, str>>,
    /// Span covering the comment block.
    span: Span,
}
724
725impl<'a> Default for Docs<'a> {
726    fn default() -> Self {
727        Self {
728            docs: Default::default(),
729            span: Span { start: 0, end: 0 },
730        }
731    }
732}
733
/// A named type definition (`type`, `record`, `flags`, `resource`, ...).
struct TypeDef<'a> {
    /// Doc comments preceding the definition.
    docs: Docs<'a>,
    /// Attributes preceding the definition.
    attributes: Vec<Attribute<'a>>,
    /// The type's name.
    name: Id<'a>,
    /// The defined type itself.
    ty: Type<'a>,
}
740
/// All types expressible in WIT source text. Primitive variants carry only
/// the span they were parsed at; compound variants carry their payloads.
enum Type<'a> {
    Bool(Span),
    U8(Span),
    U16(Span),
    U32(Span),
    U64(Span),
    S8(Span),
    S16(Span),
    S32(Span),
    S64(Span),
    F32(Span),
    F64(Span),
    Char(Span),
    String(Span),
    /// A reference to a named type.
    Name(Id<'a>),
    List(List<'a>),
    Handle(Handle<'a>),
    Resource(Resource<'a>),
    Record(Record<'a>),
    Flags(Flags<'a>),
    Variant(Variant<'a>),
    Tuple(Tuple<'a>),
    Enum(Enum<'a>),
    Option(Option_<'a>),
    Result(Result_<'a>),
    Future(Future<'a>),
    Stream(Stream<'a>),
    ErrorContext(Span),
}
770
/// A handle type referring to a resource.
enum Handle<'a> {
    /// An owned handle: `own<resource>`.
    Own { resource: Id<'a> },
    /// A borrowed handle: `borrow<resource>`.
    Borrow { resource: Id<'a> },
}
775
776impl Handle<'_> {
777    fn span(&self) -> Span {
778        match self {
779            Handle::Own { resource } | Handle::Borrow { resource } => resource.span,
780        }
781    }
782}
783
/// A `resource name { ... }` type definition.
struct Resource<'a> {
    /// Span of the resource's name.
    span: Span,
    /// Methods, statics, and constructors declared in the braces.
    funcs: Vec<ResourceFunc<'a>>,
}

/// A function declared inside a `resource` block.
enum ResourceFunc<'a> {
    /// `name: func(...)` — an instance method.
    Method(NamedFunc<'a>),
    /// `name: static func(...)`.
    Static(NamedFunc<'a>),
    /// `constructor(...)`.
    Constructor(NamedFunc<'a>),
}
794
impl<'a> ResourceFunc<'a> {
    /// Parses one function within a `resource` block: either
    /// `constructor(...)` or `name: [static] func(...)`.
    fn parse(
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
        tokens: &mut Tokenizer<'a>,
    ) -> Result<Self> {
        // Peek via a cloned tokenizer to decide between a constructor and a
        // named method/static.
        match tokens.clone().next()? {
            Some((span, Token::Constructor)) => {
                tokens.expect(Token::Constructor)?;
                tokens.expect(Token::LeftParen)?;
                // Constructor parameter list: `name: type` pairs up to `)`.
                let params = parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok((name, ty))
                })?;
                tokens.expect_semicolon()?;
                // Constructors get the synthetic name "constructor" and no
                // result type.
                Ok(ResourceFunc::Constructor(NamedFunc {
                    docs,
                    attributes,
                    name: Id {
                        span,
                        name: "constructor",
                    },
                    func: Func {
                        span,
                        params,
                        result: None,
                    },
                }))
            }
            Some((_span, Token::Id | Token::ExplicitId)) => {
                let name = parse_id(tokens)?;
                tokens.expect(Token::Colon)?;
                // An optional `static` keyword selects which variant wraps
                // the parsed function.
                let ctor = if tokens.eat(Token::Static)? {
                    ResourceFunc::Static
                } else {
                    ResourceFunc::Method
                };
                let func = Func::parse(tokens)?;
                tokens.expect_semicolon()?;
                Ok(ctor(NamedFunc {
                    docs,
                    attributes,
                    name,
                    func,
                }))
            }
            other => Err(err_expected(tokens, "`constructor` or identifier", other).into()),
        }
    }

    /// Returns the underlying named function regardless of variant.
    fn named_func(&self) -> &NamedFunc<'a> {
        use ResourceFunc::*;
        match self {
            Method(f) | Static(f) | Constructor(f) => f,
        }
    }
}
854
/// A `record name { ... }` type.
struct Record<'a> {
    /// Span of the record's name.
    span: Span,
    /// The record's fields, in source order.
    fields: Vec<Field<'a>>,
}

/// A single `name: type` field within a record.
struct Field<'a> {
    /// Doc comments preceding the field.
    docs: Docs<'a>,
    /// The field's name.
    name: Id<'a>,
    /// The field's type.
    ty: Type<'a>,
}
865
/// A `flags name { ... }` type.
struct Flags<'a> {
    /// Span of the flags type's name.
    span: Span,
    /// The individual flag names, in source order.
    flags: Vec<Flag<'a>>,
}

/// A single flag within a `flags` type.
struct Flag<'a> {
    /// Doc comments preceding the flag.
    docs: Docs<'a>,
    /// The flag's name.
    name: Id<'a>,
}
875
/// A `variant name { ... }` type.
struct Variant<'a> {
    /// Span of the variant type's name.
    span: Span,
    /// The variant's cases, in source order.
    cases: Vec<Case<'a>>,
}

/// A single case within a `variant`, with an optional payload type.
struct Case<'a> {
    /// Doc comments preceding the case.
    docs: Docs<'a>,
    /// The case's name.
    name: Id<'a>,
    /// Optional payload type: `case(ty)`.
    ty: Option<Type<'a>>,
}
886
/// An `enum name { ... }` type.
struct Enum<'a> {
    /// Span of the enum type's name.
    span: Span,
    /// The enum's cases, in source order.
    cases: Vec<EnumCase<'a>>,
}

/// A single payload-less case within an `enum`.
struct EnumCase<'a> {
    /// Doc comments preceding the case.
    docs: Docs<'a>,
    /// The case's name.
    name: Id<'a>,
}
896
/// An `option<T>` type.
struct Option_<'a> {
    /// Span of the `option` keyword.
    span: Span,
    /// The payload type `T`.
    ty: Box<Type<'a>>,
}

/// A `list<T>` type.
struct List<'a> {
    /// Span of the `list` keyword.
    span: Span,
    /// The element type `T`.
    ty: Box<Type<'a>>,
}

/// A `future` or `future<T>` type.
struct Future<'a> {
    /// Span of the `future` keyword.
    span: Span,
    /// Optional payload type `T`.
    ty: Option<Box<Type<'a>>>,
}

/// A `tuple<T, U, ...>` type.
struct Tuple<'a> {
    /// Span of the `tuple` keyword.
    span: Span,
    /// The element types, in order.
    types: Vec<Type<'a>>,
}

/// A `result`, `result<T>`, `result<_, E>`, or `result<T, E>` type.
struct Result_<'a> {
    /// Span of the `result` keyword.
    span: Span,
    /// Optional ok type `T`.
    ok: Option<Box<Type<'a>>>,
    /// Optional error type `E`.
    err: Option<Box<Type<'a>>>,
}

/// A `stream` or `stream<T>` type.
struct Stream<'a> {
    /// Span of the `stream` keyword.
    span: Span,
    /// Optional element type `T`.
    ty: Option<Box<Type<'a>>>,
}
927
/// A function together with the name it was declared with.
struct NamedFunc<'a> {
    /// Doc comments preceding the declaration.
    docs: Docs<'a>,
    /// Attributes preceding the declaration.
    attributes: Vec<Attribute<'a>>,
    /// The function's name.
    name: Id<'a>,
    /// The function's signature.
    func: Func<'a>,
}

/// A parameter list: `(name, type)` pairs in declaration order.
type ParamList<'a> = Vec<(Id<'a>, Type<'a>)>;

/// A function signature: parameters plus an optional result type.
struct Func<'a> {
    /// Span of the `func` keyword.
    span: Span,
    /// The function's parameters.
    params: ParamList<'a>,
    /// Optional `-> type` result.
    result: Option<Type<'a>>,
}
942
943impl<'a> Func<'a> {
944    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Func<'a>> {
945        fn parse_params<'a>(tokens: &mut Tokenizer<'a>, left_paren: bool) -> Result<ParamList<'a>> {
946            if left_paren {
947                tokens.expect(Token::LeftParen)?;
948            };
949            parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
950                let name = parse_id(tokens)?;
951                tokens.expect(Token::Colon)?;
952                let ty = Type::parse(tokens)?;
953                Ok((name, ty))
954            })
955        }
956
957        let span = tokens.expect(Token::Func)?;
958        let params = parse_params(tokens, true)?;
959        let result = if tokens.eat(Token::RArrow)? {
960            let ty = Type::parse(tokens)?;
961            Some(ty)
962        } else {
963            None
964        };
965        Ok(Func {
966            span,
967            params,
968            result,
969        })
970    }
971}
972
impl<'a> InterfaceItem<'a> {
    /// Parses a single item within an `interface`, dispatching on the next
    /// token (peeked via a cloned tokenizer; each delegated parser
    /// re-consumes its own leading keyword).
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<InterfaceItem<'a>> {
        match tokens.clone().next()? {
            Some((_span, Token::Type)) => {
                TypeDef::parse(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Flags)) => {
                TypeDef::parse_flags(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Enum)) => {
                TypeDef::parse_enum(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Variant)) => {
                TypeDef::parse_variant(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Resource)) => {
                TypeDef::parse_resource(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Record)) => {
                TypeDef::parse_record(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            // A bare identifier begins a named function declaration.
            Some((_span, Token::Id)) | Some((_span, Token::ExplicitId)) => {
                NamedFunc::parse(tokens, docs, attributes).map(InterfaceItem::Func)
            }
            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(InterfaceItem::Use),
            other => Err(err_expected(tokens, "`type`, `resource` or `func`", other).into()),
        }
    }
}
1006
1007impl<'a> TypeDef<'a> {
    /// Parses a type alias of the form `type <name> = <ty>;`.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Type)?;
        let name = parse_id(tokens)?;
        tokens.expect(Token::Equals)?;
        let ty = Type::parse(tokens)?;
        tokens.expect_semicolon()?;
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }
1025
1026    fn parse_flags(
1027        tokens: &mut Tokenizer<'a>,
1028        docs: Docs<'a>,
1029        attributes: Vec<Attribute<'a>>,
1030    ) -> Result<Self> {
1031        tokens.expect(Token::Flags)?;
1032        let name = parse_id(tokens)?;
1033        let ty = Type::Flags(Flags {
1034            span: name.span,
1035            flags: parse_list(
1036                tokens,
1037                Token::LeftBrace,
1038                Token::RightBrace,
1039                |docs, tokens| {
1040                    let name = parse_id(tokens)?;
1041                    Ok(Flag { docs, name })
1042                },
1043            )?,
1044        });
1045        Ok(TypeDef {
1046            docs,
1047            attributes,
1048            name,
1049            ty,
1050        })
1051    }
1052
1053    fn parse_resource(
1054        tokens: &mut Tokenizer<'a>,
1055        docs: Docs<'a>,
1056        attributes: Vec<Attribute<'a>>,
1057    ) -> Result<Self> {
1058        tokens.expect(Token::Resource)?;
1059        let name = parse_id(tokens)?;
1060        let mut funcs = Vec::new();
1061        if tokens.eat(Token::LeftBrace)? {
1062            while !tokens.eat(Token::RightBrace)? {
1063                let docs = parse_docs(tokens)?;
1064                let attributes = Attribute::parse_list(tokens)?;
1065                funcs.push(ResourceFunc::parse(docs, attributes, tokens)?);
1066            }
1067        } else {
1068            tokens.expect_semicolon()?;
1069        }
1070        let ty = Type::Resource(Resource {
1071            span: name.span,
1072            funcs,
1073        });
1074        Ok(TypeDef {
1075            docs,
1076            attributes,
1077            name,
1078            ty,
1079        })
1080    }
1081
1082    fn parse_record(
1083        tokens: &mut Tokenizer<'a>,
1084        docs: Docs<'a>,
1085        attributes: Vec<Attribute<'a>>,
1086    ) -> Result<Self> {
1087        tokens.expect(Token::Record)?;
1088        let name = parse_id(tokens)?;
1089        let ty = Type::Record(Record {
1090            span: name.span,
1091            fields: parse_list(
1092                tokens,
1093                Token::LeftBrace,
1094                Token::RightBrace,
1095                |docs, tokens| {
1096                    let name = parse_id(tokens)?;
1097                    tokens.expect(Token::Colon)?;
1098                    let ty = Type::parse(tokens)?;
1099                    Ok(Field { docs, name, ty })
1100                },
1101            )?,
1102        });
1103        Ok(TypeDef {
1104            docs,
1105            attributes,
1106            name,
1107            ty,
1108        })
1109    }
1110
1111    fn parse_variant(
1112        tokens: &mut Tokenizer<'a>,
1113        docs: Docs<'a>,
1114        attributes: Vec<Attribute<'a>>,
1115    ) -> Result<Self> {
1116        tokens.expect(Token::Variant)?;
1117        let name = parse_id(tokens)?;
1118        let ty = Type::Variant(Variant {
1119            span: name.span,
1120            cases: parse_list(
1121                tokens,
1122                Token::LeftBrace,
1123                Token::RightBrace,
1124                |docs, tokens| {
1125                    let name = parse_id(tokens)?;
1126                    let ty = if tokens.eat(Token::LeftParen)? {
1127                        let ty = Type::parse(tokens)?;
1128                        tokens.expect(Token::RightParen)?;
1129                        Some(ty)
1130                    } else {
1131                        None
1132                    };
1133                    Ok(Case { docs, name, ty })
1134                },
1135            )?,
1136        });
1137        Ok(TypeDef {
1138            docs,
1139            attributes,
1140            name,
1141            ty,
1142        })
1143    }
1144
1145    fn parse_enum(
1146        tokens: &mut Tokenizer<'a>,
1147        docs: Docs<'a>,
1148        attributes: Vec<Attribute<'a>>,
1149    ) -> Result<Self> {
1150        tokens.expect(Token::Enum)?;
1151        let name = parse_id(tokens)?;
1152        let ty = Type::Enum(Enum {
1153            span: name.span,
1154            cases: parse_list(
1155                tokens,
1156                Token::LeftBrace,
1157                Token::RightBrace,
1158                |docs, tokens| {
1159                    let name = parse_id(tokens)?;
1160                    Ok(EnumCase { docs, name })
1161                },
1162            )?,
1163        });
1164        Ok(TypeDef {
1165            docs,
1166            attributes,
1167            name,
1168            ty,
1169        })
1170    }
1171}
1172
1173impl<'a> NamedFunc<'a> {
1174    fn parse(
1175        tokens: &mut Tokenizer<'a>,
1176        docs: Docs<'a>,
1177        attributes: Vec<Attribute<'a>>,
1178    ) -> Result<Self> {
1179        let name = parse_id(tokens)?;
1180        tokens.expect(Token::Colon)?;
1181        let func = Func::parse(tokens)?;
1182        tokens.expect_semicolon()?;
1183        Ok(NamedFunc {
1184            docs,
1185            attributes,
1186            name,
1187            func,
1188        })
1189    }
1190}
1191
1192fn parse_id<'a>(tokens: &mut Tokenizer<'a>) -> Result<Id<'a>> {
1193    match tokens.next()? {
1194        Some((span, Token::Id)) => Ok(Id {
1195            name: tokens.parse_id(span)?,
1196            span,
1197        }),
1198        Some((span, Token::ExplicitId)) => Ok(Id {
1199            name: tokens.parse_explicit_id(span)?,
1200            span,
1201        }),
1202        other => Err(err_expected(tokens, "an identifier or string", other).into()),
1203    }
1204}
1205
1206fn parse_opt_version(tokens: &mut Tokenizer<'_>) -> Result<Option<(Span, Version)>> {
1207    if tokens.eat(Token::At)? {
1208        parse_version(tokens).map(Some)
1209    } else {
1210        Ok(None)
1211    }
1212}
1213
/// Parses a semver version `major.minor.patch` with optional pre-release
/// (`-...`) and build-metadata (`+...`) suffixes, returning its span and the
/// parsed [`Version`].
fn parse_version(tokens: &mut Tokenizer<'_>) -> Result<(Span, Version)> {
    // The mandatory `major.minor.patch` core of the version.
    let start = tokens.expect(Token::Integer)?.start;
    tokens.expect(Token::Period)?;
    tokens.expect(Token::Integer)?;
    tokens.expect(Token::Period)?;
    let end = tokens.expect(Token::Integer)?.end;
    let mut span = Span { start, end };
    // Optionally extend the span over `-pre.release` and `+build.meta`.
    eat_ids(tokens, Token::Minus, &mut span)?;
    eat_ids(tokens, Token::Plus, &mut span)?;
    // The accumulated span's raw text is handed to the `semver` crate for
    // the authoritative parse; lexing above is only an approximation.
    let string = tokens.get_span(span);
    let version = Version::parse(string).map_err(|e| Error::new(span, e.to_string()))?;
    return Ok((span, version));

    // According to `semver.org` this is what we're parsing:
    //
    // ```ebnf
    // <pre-release> ::= <dot-separated pre-release identifiers>
    //
    // <dot-separated pre-release identifiers> ::= <pre-release identifier>
    //                                           | <pre-release identifier> "." <dot-separated pre-release identifiers>
    //
    // <build> ::= <dot-separated build identifiers>
    //
    // <dot-separated build identifiers> ::= <build identifier>
    //                                     | <build identifier> "." <dot-separated build identifiers>
    //
    // <pre-release identifier> ::= <alphanumeric identifier>
    //                            | <numeric identifier>
    //
    // <build identifier> ::= <alphanumeric identifier>
    //                      | <digits>
    //
    // <alphanumeric identifier> ::= <non-digit>
    //                             | <non-digit> <identifier characters>
    //                             | <identifier characters> <non-digit>
    //                             | <identifier characters> <non-digit> <identifier characters>
    //
    // <numeric identifier> ::= "0"
    //                        | <positive digit>
    //                        | <positive digit> <digits>
    //
    // <identifier characters> ::= <identifier character>
    //                           | <identifier character> <identifier characters>
    //
    // <identifier character> ::= <digit>
    //                          | <non-digit>
    //
    // <non-digit> ::= <letter>
    //               | "-"
    //
    // <digits> ::= <digit>
    //            | <digit> <digits>
    // ```
    //
    // This is loosely based on WIT syntax and an approximation is parsed here:
    //
    // * This function starts by parsing the optional leading `-` and `+` which
    //   indicates pre-release and build metadata.
    // * Afterwards all of $id, $integer, `-`, and `.` are chomped. The only
    //   exception here is that if `.` isn't followed by $id, $integer, or `-`
    //   then it's assumed that it's something like `use a:b@1.0.0-a.{...}`
    //   where the `.` is part of WIT syntax, not semver.
    //
    // Note that this additionally doesn't try to return any first-class errors.
    // Instead this bails out on something unrecognized for something else in
    // the system to return an error.
    fn eat_ids(tokens: &mut Tokenizer<'_>, prefix: Token, end: &mut Span) -> Result<()> {
        if !tokens.eat(prefix)? {
            return Ok(());
        }
        loop {
            // Lookahead on a clone; `tokens` is only advanced once the
            // peeked token is confirmed to be part of the version.
            let mut clone = tokens.clone();
            match clone.next()? {
                Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                    end.end = span.end;
                    *tokens = clone;
                }
                // A `.` only belongs to the version if it's followed by
                // another version component (see comment above).
                Some((_span, Token::Period)) => match clone.next()? {
                    Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                        end.end = span.end;
                        *tokens = clone;
                    }
                    _ => break Ok(()),
                },
                _ => break Ok(()),
            }
        }
    }
}
1303
/// Collects leading doc comments into a [`Docs`], consuming comment and
/// whitespace tokens but leaving the first non-comment token unconsumed.
fn parse_docs<'a>(tokens: &mut Tokenizer<'a>) -> Result<Docs<'a>> {
    let mut docs = Docs::default();
    // Lookahead on a clone; `tokens` is only advanced past tokens that were
    // confirmed to be whitespace or comments.
    let mut clone = tokens.clone();
    // Whether the start of the overall doc span has been recorded yet.
    let mut started = false;
    while let Some((span, token)) = clone.next_raw()? {
        match token {
            Token::Whitespace => {}
            Token::Comment => {
                let comment = tokens.get_span(span);
                if !started {
                    docs.span.start = span.start;
                    started = true;
                }
                // Exclude the comment's trailing ASCII whitespace (e.g. its
                // newline) from the recorded doc span.
                let trailing_ws = comment
                    .bytes()
                    .rev()
                    .take_while(|ch| ch.is_ascii_whitespace())
                    .count();
                docs.span.end = span.end - (trailing_ws as u32);
                docs.docs.push(comment.into());
            }
            // First "real" token: stop without committing this lookahead.
            _ => break,
        };
        *tokens = clone.clone();
    }
    Ok(docs)
}
1331
impl<'a> Type<'a> {
    /// Parses a single type: either a builtin keyword, a parameterized
    /// container (`tuple`, `list`, `option`, `result`, `future`, `stream`,
    /// `own`, `borrow`), or a named reference to a user-defined type.
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
        match tokens.next()? {
            Some((span, Token::U8)) => Ok(Type::U8(span)),
            Some((span, Token::U16)) => Ok(Type::U16(span)),
            Some((span, Token::U32)) => Ok(Type::U32(span)),
            Some((span, Token::U64)) => Ok(Type::U64(span)),
            Some((span, Token::S8)) => Ok(Type::S8(span)),
            Some((span, Token::S16)) => Ok(Type::S16(span)),
            Some((span, Token::S32)) => Ok(Type::S32(span)),
            Some((span, Token::S64)) => Ok(Type::S64(span)),
            Some((span, Token::F32)) => Ok(Type::F32(span)),
            Some((span, Token::F64)) => Ok(Type::F64(span)),
            Some((span, Token::Char)) => Ok(Type::Char(span)),

            // tuple<T, U, ...>
            Some((span, Token::Tuple)) => {
                let types = parse_list(
                    tokens,
                    Token::LessThan,
                    Token::GreaterThan,
                    |_docs, tokens| Type::parse(tokens),
                )?;
                Ok(Type::Tuple(Tuple { span, types }))
            }

            Some((span, Token::Bool)) => Ok(Type::Bool(span)),
            Some((span, Token::String_)) => Ok(Type::String(span)),

            // list<T>
            Some((span, Token::List)) => {
                tokens.expect(Token::LessThan)?;
                let ty = Type::parse(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::List(List {
                    span,
                    ty: Box::new(ty),
                }))
            }

            // option<T>
            Some((span, Token::Option_)) => {
                tokens.expect(Token::LessThan)?;
                let ty = Type::parse(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Option(Option_ {
                    span,
                    ty: Box::new(ty),
                }))
            }

            // result<T, E>
            // result<_, E>
            // result<T>
            // result
            Some((span, Token::Result_)) => {
                let mut ok = None;
                let mut err = None;

                // All four forms above are accepted; `<...>` as a whole is
                // optional, and `_` skips the `ok` type.
                if tokens.eat(Token::LessThan)? {
                    if tokens.eat(Token::Underscore)? {
                        tokens.expect(Token::Comma)?;
                        err = Some(Box::new(Type::parse(tokens)?));
                    } else {
                        ok = Some(Box::new(Type::parse(tokens)?));
                        if tokens.eat(Token::Comma)? {
                            err = Some(Box::new(Type::parse(tokens)?));
                        }
                    };
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Result(Result_ { span, ok, err }))
            }

            // future<T>
            // future
            Some((span, Token::Future)) => {
                let mut ty = None;

                // The payload type is optional.
                if tokens.eat(Token::LessThan)? {
                    ty = Some(Box::new(Type::parse(tokens)?));
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Future(Future { span, ty }))
            }

            // stream<T>
            // stream
            Some((span, Token::Stream)) => {
                let mut ty = None;

                // The element type is optional.
                if tokens.eat(Token::LessThan)? {
                    ty = Some(Box::new(Type::parse(tokens)?));
                    tokens.expect(Token::GreaterThan)?;
                };
                Ok(Type::Stream(Stream { span, ty }))
            }

            // error-context
            Some((span, Token::ErrorContext)) => Ok(Type::ErrorContext(span)),

            // own<T>
            Some((_span, Token::Own)) => {
                tokens.expect(Token::LessThan)?;
                let resource = parse_id(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Handle(Handle::Own { resource }))
            }

            // borrow<T>
            Some((_span, Token::Borrow)) => {
                tokens.expect(Token::LessThan)?;
                let resource = parse_id(tokens)?;
                tokens.expect(Token::GreaterThan)?;
                Ok(Type::Handle(Handle::Borrow { resource }))
            }

            // `foo`
            Some((span, Token::Id)) => Ok(Type::Name(Id {
                name: tokens.parse_id(span)?.into(),
                span,
            })),
            // `%foo`
            Some((span, Token::ExplicitId)) => Ok(Type::Name(Id {
                name: tokens.parse_explicit_id(span)?.into(),
                span,
            })),

            other => Err(err_expected(tokens, "a type", other).into()),
        }
    }

    /// Returns the source span covering this type.
    fn span(&self) -> Span {
        match self {
            Type::Bool(span)
            | Type::U8(span)
            | Type::U16(span)
            | Type::U32(span)
            | Type::U64(span)
            | Type::S8(span)
            | Type::S16(span)
            | Type::S32(span)
            | Type::S64(span)
            | Type::F32(span)
            | Type::F64(span)
            | Type::Char(span)
            | Type::String(span)
            | Type::ErrorContext(span) => *span,
            Type::Name(id) => id.span,
            Type::List(l) => l.span,
            Type::Handle(h) => h.span(),
            Type::Resource(r) => r.span,
            Type::Record(r) => r.span,
            Type::Flags(f) => f.span,
            Type::Variant(v) => v.span,
            Type::Tuple(t) => t.span,
            Type::Enum(e) => e.span,
            Type::Option(o) => o.span,
            Type::Result(r) => r.span,
            Type::Future(f) => f.span,
            Type::Stream(s) => s.span,
        }
    }
}
1496
1497fn parse_list<'a, T>(
1498    tokens: &mut Tokenizer<'a>,
1499    start: Token,
1500    end: Token,
1501    parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
1502) -> Result<Vec<T>> {
1503    tokens.expect(start)?;
1504    parse_list_trailer(tokens, end, parse)
1505}
1506
1507fn parse_list_trailer<'a, T>(
1508    tokens: &mut Tokenizer<'a>,
1509    end: Token,
1510    mut parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
1511) -> Result<Vec<T>> {
1512    let mut items = Vec::new();
1513    loop {
1514        // get docs before we skip them to try to eat the end token
1515        let docs = parse_docs(tokens)?;
1516
1517        // if we found an end token then we're done
1518        if tokens.eat(end)? {
1519            break;
1520        }
1521
1522        let item = parse(docs, tokens)?;
1523        items.push(item);
1524
1525        // if there's no trailing comma then this is required to be the end,
1526        // otherwise we go through the loop to try to get another item
1527        if !tokens.eat(Token::Comma)? {
1528            tokens.expect(end)?;
1529            break;
1530        }
1531    }
1532    Ok(items)
1533}
1534
1535fn err_expected(
1536    tokens: &Tokenizer<'_>,
1537    expected: &'static str,
1538    found: Option<(Span, Token)>,
1539) -> Error {
1540    match found {
1541        Some((span, token)) => Error::new(
1542            span,
1543            format!("expected {}, found {}", expected, token.describe()),
1544        ),
1545        None => Error::new(
1546            tokens.eof_span(),
1547            format!("expected {}, found eof", expected),
1548        ),
1549    }
1550}
1551
/// A parsed `@...` attribute attached to an item.
enum Attribute<'a> {
    /// `@since(version = 1.2.3)`
    Since { span: Span, version: Version },
    /// `@unstable(feature = foo)`
    Unstable { span: Span, feature: Id<'a> },
    /// `@deprecated(version = 1.2.3)`
    Deprecated { span: Span, version: Version },
}
1557
1558impl<'a> Attribute<'a> {
1559    fn parse_list(tokens: &mut Tokenizer<'a>) -> Result<Vec<Attribute<'a>>> {
1560        let mut ret = Vec::new();
1561        while tokens.eat(Token::At)? {
1562            let id = parse_id(tokens)?;
1563            let attr = match id.name {
1564                "since" => {
1565                    tokens.expect(Token::LeftParen)?;
1566                    eat_id(tokens, "version")?;
1567                    tokens.expect(Token::Equals)?;
1568                    let (_span, version) = parse_version(tokens)?;
1569                    tokens.expect(Token::RightParen)?;
1570                    Attribute::Since {
1571                        span: id.span,
1572                        version,
1573                    }
1574                }
1575                "unstable" => {
1576                    tokens.expect(Token::LeftParen)?;
1577                    eat_id(tokens, "feature")?;
1578                    tokens.expect(Token::Equals)?;
1579                    let feature = parse_id(tokens)?;
1580                    tokens.expect(Token::RightParen)?;
1581                    Attribute::Unstable {
1582                        span: id.span,
1583                        feature,
1584                    }
1585                }
1586                "deprecated" => {
1587                    tokens.expect(Token::LeftParen)?;
1588                    eat_id(tokens, "version")?;
1589                    tokens.expect(Token::Equals)?;
1590                    let (_span, version) = parse_version(tokens)?;
1591                    tokens.expect(Token::RightParen)?;
1592                    Attribute::Deprecated {
1593                        span: id.span,
1594                        version,
1595                    }
1596                }
1597                other => {
1598                    bail!(Error::new(id.span, format!("unknown attribute `{other}`"),))
1599                }
1600            };
1601            ret.push(attr);
1602        }
1603        Ok(ret)
1604    }
1605
1606    fn span(&self) -> Span {
1607        match self {
1608            Attribute::Since { span, .. }
1609            | Attribute::Unstable { span, .. }
1610            | Attribute::Deprecated { span, .. } => *span,
1611        }
1612    }
1613}
1614
1615fn eat_id(tokens: &mut Tokenizer<'_>, expected: &str) -> Result<Span> {
1616    let id = parse_id(tokens)?;
1617    if id.name != expected {
1618        bail!(Error::new(
1619            id.span,
1620            format!("expected `{expected}`, found `{}`", id.name),
1621        ));
1622    }
1623    Ok(id.span)
1624}
1625
/// A listing of source files which are used to get parsed into an
/// [`UnresolvedPackage`].
///
/// [`UnresolvedPackage`]: crate::UnresolvedPackage
#[derive(Clone, Default)]
pub struct SourceMap {
    // All files pushed so far, in insertion order.
    sources: Vec<Source>,
    // Running total byte length of all pushed contents; this is the offset
    // assigned to the next file pushed.
    offset: u32,
    // Forwarded to `Tokenizer::new` in `parse`; transitional lexing knob.
    require_f32_f64: Option<bool>,
}
1636
/// A single file's contents plus its position within the [`SourceMap`].
#[derive(Clone)]
struct Source {
    // Byte offset at which this file starts within the overall map.
    offset: u32,
    // Path used only for error messages; not re-read from disk.
    path: PathBuf,
    // File contents, with a trailing `\n` appended by `SourceMap::push`.
    contents: String,
}
1643
1644impl SourceMap {
1645    /// Creates a new empty source map.
1646    pub fn new() -> SourceMap {
1647        SourceMap::default()
1648    }
1649
1650    #[doc(hidden)] // NB: only here for a transitionary period
1651    pub fn set_require_f32_f64(&mut self, enable: bool) {
1652        self.require_f32_f64 = Some(enable);
1653    }
1654
1655    /// Reads the file `path` on the filesystem and appends its contents to this
1656    /// [`SourceMap`].
1657    pub fn push_file(&mut self, path: &Path) -> Result<()> {
1658        let contents = std::fs::read_to_string(path)
1659            .with_context(|| format!("failed to read file {path:?}"))?;
1660        self.push(path, contents);
1661        Ok(())
1662    }
1663
1664    /// Appends the given contents with the given path into this source map.
1665    ///
1666    /// The `path` provided is not read from the filesystem and is instead only
1667    /// used during error messages. Each file added to a [`SourceMap`] is
1668    /// used to create the final parsed package namely by unioning all the
1669    /// interfaces and worlds defined together. Note that each file has its own
1670    /// personal namespace, however, for top-level `use` and such.
1671    pub fn push(&mut self, path: &Path, contents: impl Into<String>) {
1672        let mut contents = contents.into();
1673        // Guarantee that there's at least one character in these contents by
1674        // appending a single newline to the end. This is excluded from
1675        // tokenization below so it's only here to ensure that spans which point
1676        // one byte beyond the end of a file (eof) point to the same original
1677        // file.
1678        contents.push('\n');
1679        let new_offset = self.offset + u32::try_from(contents.len()).unwrap();
1680        self.sources.push(Source {
1681            offset: self.offset,
1682            path: path.to_path_buf(),
1683            contents,
1684        });
1685        self.offset = new_offset;
1686    }
1687
1688    /// Parses the files added to this source map into a
1689    /// [`UnresolvedPackageGroup`].
1690    pub fn parse(self) -> Result<UnresolvedPackageGroup> {
1691        let mut nested = Vec::new();
1692        let main = self.rewrite_error(|| {
1693            let mut resolver = Resolver::default();
1694            let mut srcs = self.sources.iter().collect::<Vec<_>>();
1695            srcs.sort_by_key(|src| &src.path);
1696
1697            // Parse each source file individually. A tokenizer is created here
1698            // form settings and then `PackageFile` is used to parse the whole
1699            // stream of tokens.
1700            for src in srcs {
1701                let mut tokens = Tokenizer::new(
1702                    // chop off the forcibly appended `\n` character when
1703                    // passing through the source to get tokenized.
1704                    &src.contents[..src.contents.len() - 1],
1705                    src.offset,
1706                    self.require_f32_f64,
1707                )
1708                .with_context(|| format!("failed to tokenize path: {}", src.path.display()))?;
1709                let mut file = PackageFile::parse(&mut tokens)?;
1710
1711                // Filter out any nested packages and resolve them separately.
1712                // Nested packages have only a single "file" so only one item
1713                // is pushed into a `Resolver`. Note that a nested `Resolver`
1714                // is used here, not the outer one.
1715                //
1716                // Note that filtering out `Package` items is required due to
1717                // how the implementation of disallowing nested packages in
1718                // nested packages currently works.
1719                for item in mem::take(&mut file.decl_list.items) {
1720                    match item {
1721                        AstItem::Package(nested_pkg) => {
1722                            let mut resolve = Resolver::default();
1723                            resolve.push(nested_pkg).with_context(|| {
1724                                format!(
1725                                    "failed to handle nested package in: {}",
1726                                    src.path.display()
1727                                )
1728                            })?;
1729
1730                            nested.push(resolve.resolve()?);
1731                        }
1732                        other => file.decl_list.items.push(other),
1733                    }
1734                }
1735
1736                // With nested packages handled push this file into the
1737                // resolver.
1738                resolver.push(file).with_context(|| {
1739                    format!("failed to start resolving path: {}", src.path.display())
1740                })?;
1741            }
1742            Ok(resolver.resolve()?)
1743        })?;
1744        Ok(UnresolvedPackageGroup {
1745            main,
1746            nested,
1747            source_map: self,
1748        })
1749    }
1750
1751    pub(crate) fn rewrite_error<F, T>(&self, f: F) -> Result<T>
1752    where
1753        F: FnOnce() -> Result<T>,
1754    {
1755        let mut err = match f() {
1756            Ok(t) => return Ok(t),
1757            Err(e) => e,
1758        };
1759        if let Some(parse) = err.downcast_mut::<Error>() {
1760            if parse.highlighted.is_none() {
1761                let msg = self.highlight_err(parse.span.start, Some(parse.span.end), &parse.msg);
1762                parse.highlighted = Some(msg);
1763            }
1764        }
1765        if let Some(_) = err.downcast_mut::<Error>() {
1766            return Err(err);
1767        }
1768        if let Some(notfound) = err.downcast_mut::<PackageNotFoundError>() {
1769            if notfound.highlighted.is_none() {
1770                let msg = self.highlight_err(
1771                    notfound.span.start,
1772                    Some(notfound.span.end),
1773                    &format!("{notfound}"),
1774                );
1775                notfound.highlighted = Some(msg);
1776            }
1777        }
1778        if let Some(_) = err.downcast_mut::<PackageNotFoundError>() {
1779            return Err(err);
1780        }
1781
1782        if let Some(lex) = err.downcast_ref::<lex::Error>() {
1783            let pos = match lex {
1784                lex::Error::Unexpected(at, _)
1785                | lex::Error::UnterminatedComment(at)
1786                | lex::Error::Wanted { at, .. }
1787                | lex::Error::InvalidCharInId(at, _)
1788                | lex::Error::IdPartEmpty(at)
1789                | lex::Error::InvalidEscape(at, _) => *at,
1790            };
1791            let msg = self.highlight_err(pos, None, lex);
1792            bail!("{msg}")
1793        }
1794
1795        if let Some(sort) = err.downcast_mut::<toposort::Error>() {
1796            if sort.highlighted().is_none() {
1797                let span = match sort {
1798                    toposort::Error::NonexistentDep { span, .. }
1799                    | toposort::Error::Cycle { span, .. } => *span,
1800                };
1801                let highlighted = self.highlight_err(span.start, Some(span.end), &sort);
1802                sort.set_highlighted(highlighted);
1803            }
1804        }
1805
1806        Err(err)
1807    }
1808
1809    fn highlight_err(&self, start: u32, end: Option<u32>, err: impl fmt::Display) -> String {
1810        let src = self.source_for_offset(start);
1811        let start = src.to_relative_offset(start);
1812        let end = end.map(|end| src.to_relative_offset(end));
1813        let (line, col) = src.linecol(start);
1814        let snippet = src.contents.lines().nth(line).unwrap_or("");
1815        let mut msg = format!(
1816            "\
1817{err}
1818     --> {file}:{line}:{col}
1819      |
1820 {line:4} | {snippet}
1821      | {marker:>0$}",
1822            col + 1,
1823            file = src.path.display(),
1824            line = line + 1,
1825            col = col + 1,
1826            marker = "^",
1827        );
1828        if let Some(end) = end {
1829            if let Some(s) = src.contents.get(start..end) {
1830                for _ in s.chars().skip(1) {
1831                    msg.push('-');
1832                }
1833            }
1834        }
1835        return msg;
1836    }
1837
1838    pub(crate) fn render_location(&self, span: Span) -> String {
1839        let src = self.source_for_offset(span.start);
1840        let start = src.to_relative_offset(span.start);
1841        let (line, col) = src.linecol(start);
1842        format!(
1843            "{file}:{line}:{col}",
1844            file = src.path.display(),
1845            line = line + 1,
1846            col = col + 1,
1847        )
1848    }
1849
1850    fn source_for_offset(&self, start: u32) -> &Source {
1851        let i = match self.sources.binary_search_by_key(&start, |src| src.offset) {
1852            Ok(i) => i,
1853            Err(i) => i - 1,
1854        };
1855        &self.sources[i]
1856    }
1857
1858    /// Returns an iterator over all filenames added to this source map.
1859    pub fn source_files(&self) -> impl Iterator<Item = &Path> {
1860        self.sources.iter().map(|src| src.path.as_path())
1861    }
1862}
1863
impl Source {
    /// Converts an absolute `SourceMap` offset into a byte offset within
    /// this file's `contents`.
    fn to_relative_offset(&self, offset: u32) -> usize {
        usize::try_from(offset - self.offset).unwrap()
    }

    /// Returns the 0-based `(line, column)` of a byte offset within this
    /// file's `contents`.
    fn linecol(&self, relative_offset: usize) -> (usize, usize) {
        let mut cur = 0;
        // Use split_terminator instead of lines so that if there is a `\r`,
        // it is included in the offset calculation. The `+1` values below
        // account for the `\n`.
        for (i, line) in self.contents.split_terminator('\n').enumerate() {
            if cur + line.len() + 1 > relative_offset {
                return (i, relative_offset - cur);
            }
            cur += line.len() + 1;
        }
        // Offset lies past the final newline: report column 0 of the line
        // after the last one.
        (self.contents.lines().count(), 0)
    }
}
1883
/// Result of parsing a standalone use-path string via [`parse_use_path`].
pub enum ParsedUsePath {
    /// A bare, package-local name.
    Name(String),
    /// A fully-qualified path: a package name plus an item within it.
    Package(crate::PackageName, String),
}
1888
1889pub fn parse_use_path(s: &str) -> Result<ParsedUsePath> {
1890    let mut tokens = Tokenizer::new(s, 0, None)?;
1891    let path = UsePath::parse(&mut tokens)?;
1892    if tokens.next()?.is_some() {
1893        bail!("trailing tokens in path specifier");
1894    }
1895    Ok(match path {
1896        UsePath::Id(id) => ParsedUsePath::Name(id.name.to_string()),
1897        UsePath::Package { id, name } => {
1898            ParsedUsePath::Package(id.package_name(), name.name.to_string())
1899        }
1900    })
1901}