1use crate::{Error, PackageNotFoundError, UnresolvedPackageGroup};
2use anyhow::{bail, Context, Result};
3use lex::{Span, Token, Tokenizer};
4use semver::Version;
5use std::borrow::Cow;
6use std::fmt;
7use std::mem;
8use std::path::{Path, PathBuf};
9
10pub mod lex;
11
12pub use resolve::Resolver;
13mod resolve;
14pub mod toposort;
15
16pub use lex::validate_id;
17
/// A single parsed WIT source file: an optional leading `package`
/// declaration followed by the file's top-level items.
struct PackageFile<'a> {
    /// The `package ns:name@ver;` header, if one was present.
    package_id: Option<PackageName<'a>>,
    /// All top-level declarations in this file.
    decl_list: DeclList<'a>,
}
25
26impl<'a> PackageFile<'a> {
27 fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
32 let mut package_name_tokens_peek = tokens.clone();
33 let docs = parse_docs(&mut package_name_tokens_peek)?;
34
35 let package_id = if package_name_tokens_peek.eat(Token::Package)? {
38 let name = PackageName::parse(&mut package_name_tokens_peek, docs)?;
39 if package_name_tokens_peek.eat(Token::Semicolon)? {
40 *tokens = package_name_tokens_peek;
41 Some(name)
42 } else {
43 None
44 }
45 } else {
46 None
47 };
48 let decl_list = DeclList::parse_until(tokens, None)?;
49 Ok(PackageFile {
50 package_id,
51 decl_list,
52 })
53 }
54
55 fn parse_nested(
57 tokens: &mut Tokenizer<'a>,
58 docs: Docs<'a>,
59 attributes: Vec<Attribute<'a>>,
60 ) -> Result<Self> {
61 let span = tokens.expect(Token::Package)?;
62 if !attributes.is_empty() {
63 bail!(Error::new(
64 span,
65 format!("cannot place attributes on nested packages"),
66 ));
67 }
68 let package_id = PackageName::parse(tokens, docs)?;
69 tokens.expect(Token::LeftBrace)?;
70 let decl_list = DeclList::parse_until(tokens, Some(Token::RightBrace))?;
71 Ok(PackageFile {
72 package_id: Some(package_id),
73 decl_list,
74 })
75 }
76}
77
/// An ordered list of top-level WIT declarations (interfaces, worlds,
/// `use`s, and nested packages).
#[derive(Default)]
pub struct DeclList<'a> {
    items: Vec<AstItem<'a>>,
}
117
impl<'a> DeclList<'a> {
    /// Parses top-level items until `end` is consumed, or until EOF when
    /// `end` is `None` (i.e. an entire file rather than a brace-delimited
    /// nested package body).
    fn parse_until(tokens: &mut Tokenizer<'a>, end: Option<Token>) -> Result<DeclList<'a>> {
        let mut items = Vec::new();
        let mut docs = parse_docs(tokens)?;
        loop {
            match end {
                Some(end) => {
                    if tokens.eat(end)? {
                        break;
                    }
                }
                None => {
                    // Peek on a clone so no token is consumed when more
                    // input remains.
                    if tokens.clone().next()?.is_none() {
                        break;
                    }
                }
            }
            items.push(AstItem::parse(tokens, docs)?);
            // Doc comments for the next item must be skipped before the
            // end-of-list check runs again.
            docs = parse_docs(tokens)?;
        }
        Ok(DeclList { items })
    }

    /// Invokes `f` for every use/include path contained anywhere in this
    /// declaration list, recursing into nested packages.
    ///
    /// `f` receives: the name of the enclosing interface/world (when one
    /// exists), the attributes attached to the item, the path itself, the
    /// imported names (for `use` items), and whether the path must resolve
    /// to a world, an interface, or either.
    fn for_each_path<'b>(
        &'b self,
        f: &mut dyn FnMut(
            Option<&'b Id<'a>>,
            &'b [Attribute<'a>],
            &'b UsePath<'a>,
            Option<&'b [UseName<'a>]>,
            WorldOrInterface,
        ) -> Result<()>,
    ) -> Result<()> {
        for item in self.items.iter() {
            match item {
                AstItem::World(world) => {
                    // Imports and exports are collected first so both can
                    // share the `visit_kind` closure below; `use` and
                    // `include` items are visited inline.
                    let mut imports = Vec::new();
                    let mut exports = Vec::new();
                    for item in world.items.iter() {
                        match item {
                            WorldItem::Use(u) => f(
                                None,
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            WorldItem::Include(i) => f(
                                Some(&world.name),
                                &i.attributes,
                                &i.from,
                                None,
                                WorldOrInterface::World,
                            )?,
                            WorldItem::Type(_) => {}
                            WorldItem::Import(Import {
                                kind, attributes, ..
                            }) => imports.push((kind, attributes)),
                            WorldItem::Export(Export {
                                kind, attributes, ..
                            }) => exports.push((kind, attributes)),
                        }
                    }

                    let mut visit_kind =
                        |kind: &'b ExternKind<'a>, attrs: &'b [Attribute<'a>]| match kind {
                            // Inline interfaces can themselves contain `use`s.
                            ExternKind::Interface(_, items) => {
                                for item in items {
                                    match item {
                                        InterfaceItem::Use(u) => f(
                                            None,
                                            &u.attributes,
                                            &u.from,
                                            Some(&u.names),
                                            WorldOrInterface::Interface,
                                        )?,
                                        _ => {}
                                    }
                                }
                                Ok(())
                            }
                            ExternKind::Path(path) => {
                                f(None, attrs, path, None, WorldOrInterface::Interface)
                            }
                            // Plain function externs carry no paths.
                            ExternKind::Func(..) => Ok(()),
                        };

                    for (kind, attrs) in imports {
                        visit_kind(kind, attrs)?;
                    }
                    for (kind, attrs) in exports {
                        visit_kind(kind, attrs)?;
                    }
                }
                AstItem::Interface(i) => {
                    for item in i.items.iter() {
                        match item {
                            InterfaceItem::Use(u) => f(
                                Some(&i.name),
                                &u.attributes,
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            _ => {}
                        }
                    }
                }
                AstItem::Use(u) => {
                    // A top-level `use` could name either a world or an
                    // interface; resolution decides later.
                    f(
                        None,
                        &u.attributes,
                        &u.item,
                        None,
                        WorldOrInterface::Unknown,
                    )?;
                }

                AstItem::Package(pkg) => pkg.decl_list.for_each_path(f)?,
            }
        }
        Ok(())
    }
}
248
/// One top-level declaration inside a package or file.
enum AstItem<'a> {
    Interface(Interface<'a>),
    World(World<'a>),
    Use(ToplevelUse<'a>),
    /// A nested `package ... { ... }` block.
    Package(PackageFile<'a>),
}
255
256impl<'a> AstItem<'a> {
257 fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
258 let attributes = Attribute::parse_list(tokens)?;
259 match tokens.clone().next()? {
260 Some((_span, Token::Interface)) => {
261 Interface::parse(tokens, docs, attributes).map(Self::Interface)
262 }
263 Some((_span, Token::World)) => World::parse(tokens, docs, attributes).map(Self::World),
264 Some((_span, Token::Use)) => ToplevelUse::parse(tokens, attributes).map(Self::Use),
265 Some((_span, Token::Package)) => {
266 PackageFile::parse_nested(tokens, docs, attributes).map(Self::Package)
267 }
268 other => Err(err_expected(tokens, "`world`, `interface` or `use`", other).into()),
269 }
270 }
271}
272
/// A fully-qualified package name: `namespace:name` with an optional
/// semver version suffix (`@1.2.3`).
#[derive(Debug, Clone)]
struct PackageName<'a> {
    docs: Docs<'a>,
    /// Span covering the whole `ns:name[@ver]` text.
    span: Span,
    namespace: Id<'a>,
    name: Id<'a>,
    /// The optional version along with the span of its text.
    version: Option<(Span, Version)>,
}
281
282impl<'a> PackageName<'a> {
283 fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
284 let namespace = parse_id(tokens)?;
285 tokens.expect(Token::Colon)?;
286 let name = parse_id(tokens)?;
287 let version = parse_opt_version(tokens)?;
288 Ok(PackageName {
289 docs,
290 span: Span {
291 start: namespace.span.start,
292 end: version
293 .as_ref()
294 .map(|(s, _)| s.end)
295 .unwrap_or(name.span.end),
296 },
297 namespace,
298 name,
299 version,
300 })
301 }
302
303 fn package_name(&self) -> crate::PackageName {
304 crate::PackageName {
305 namespace: self.namespace.name.to_string(),
306 name: self.name.name.to_string(),
307 version: self.version.as_ref().map(|(_, v)| v.clone()),
308 }
309 }
310}
311
/// A top-level `use <path> [as <id>];` item.
struct ToplevelUse<'a> {
    /// Span of the `use` keyword.
    span: Span,
    attributes: Vec<Attribute<'a>>,
    item: UsePath<'a>,
    /// Optional rename introduced by `as`.
    as_: Option<Id<'a>>,
}
318
319impl<'a> ToplevelUse<'a> {
320 fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
321 let span = tokens.expect(Token::Use)?;
322 let item = UsePath::parse(tokens)?;
323 let as_ = if tokens.eat(Token::As)? {
324 Some(parse_id(tokens)?)
325 } else {
326 None
327 };
328 tokens.expect_semicolon()?;
329 Ok(ToplevelUse {
330 span,
331 attributes,
332 item,
333 as_,
334 })
335 }
336}
337
/// A `world <name> { ... }` declaration.
struct World<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    items: Vec<WorldItem<'a>>,
}
344
345impl<'a> World<'a> {
346 fn parse(
347 tokens: &mut Tokenizer<'a>,
348 docs: Docs<'a>,
349 attributes: Vec<Attribute<'a>>,
350 ) -> Result<Self> {
351 tokens.expect(Token::World)?;
352 let name = parse_id(tokens)?;
353 let items = Self::parse_items(tokens)?;
354 Ok(World {
355 docs,
356 attributes,
357 name,
358 items,
359 })
360 }
361
362 fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<WorldItem<'a>>> {
363 tokens.expect(Token::LeftBrace)?;
364 let mut items = Vec::new();
365 loop {
366 let docs = parse_docs(tokens)?;
367 if tokens.eat(Token::RightBrace)? {
368 break;
369 }
370 let attributes = Attribute::parse_list(tokens)?;
371 items.push(WorldItem::parse(tokens, docs, attributes)?);
372 }
373 Ok(items)
374 }
375}
376
/// One item inside a `world { ... }` body.
enum WorldItem<'a> {
    Import(Import<'a>),
    Export(Export<'a>),
    Use(Use<'a>),
    Type(TypeDef<'a>),
    Include(Include<'a>),
}
384
impl<'a> WorldItem<'a> {
    /// Dispatches on the next token (peeked via a cloned tokenizer, so
    /// nothing is consumed before the sub-parser runs) to parse one item
    /// inside a world body.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<WorldItem<'a>> {
        match tokens.clone().next()? {
            Some((_span, Token::Import)) => {
                Import::parse(tokens, docs, attributes).map(WorldItem::Import)
            }
            Some((_span, Token::Export)) => {
                Export::parse(tokens, docs, attributes).map(WorldItem::Export)
            }
            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(WorldItem::Use),
            // All type-definition forms are permitted directly in a world.
            Some((_span, Token::Type)) => {
                TypeDef::parse(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Flags)) => {
                TypeDef::parse_flags(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Resource)) => {
                TypeDef::parse_resource(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Record)) => {
                TypeDef::parse_record(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Variant)) => {
                TypeDef::parse_variant(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Enum)) => {
                TypeDef::parse_enum(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Include)) => {
                Include::parse(tokens, attributes).map(WorldItem::Include)
            }
            other => Err(err_expected(
                tokens,
                "`import`, `export`, `include`, `use`, or type definition",
                other,
            )
            .into()),
        }
    }
}
429
/// An `import ...` item inside a world.
struct Import<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    kind: ExternKind<'a>,
}
435
436impl<'a> Import<'a> {
437 fn parse(
438 tokens: &mut Tokenizer<'a>,
439 docs: Docs<'a>,
440 attributes: Vec<Attribute<'a>>,
441 ) -> Result<Import<'a>> {
442 tokens.expect(Token::Import)?;
443 let kind = ExternKind::parse(tokens)?;
444 Ok(Import {
445 docs,
446 attributes,
447 kind,
448 })
449 }
450}
451
/// An `export ...` item inside a world.
struct Export<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    kind: ExternKind<'a>,
}
457
458impl<'a> Export<'a> {
459 fn parse(
460 tokens: &mut Tokenizer<'a>,
461 docs: Docs<'a>,
462 attributes: Vec<Attribute<'a>>,
463 ) -> Result<Export<'a>> {
464 tokens.expect(Token::Export)?;
465 let kind = ExternKind::parse(tokens)?;
466 Ok(Export {
467 docs,
468 attributes,
469 kind,
470 })
471 }
472}
473
/// The payload of an `import`/`export` item.
enum ExternKind<'a> {
    /// A named inline interface: `<id>: interface { ... }`.
    Interface(Id<'a>, Vec<InterfaceItem<'a>>),
    /// A reference to an interface elsewhere: `<path>;`.
    Path(UsePath<'a>),
    /// A named function: `<id>: [async] func ...;`.
    Func(Id<'a>, Func<'a>),
}
479
impl<'a> ExternKind<'a> {
    /// Parses the payload of an `import`/`export` item.
    ///
    /// Three forms are accepted, disambiguated by lookahead on cloned
    /// tokenizers so a failed attempt consumes nothing:
    ///   * `<id>: [async] func ...;`  => `ExternKind::Func`
    ///   * `<id>: interface { ... }`  => `ExternKind::Interface`
    ///   * `<path>;`                  => `ExternKind::Path`
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<ExternKind<'a>> {
        let mut clone = tokens.clone();
        let id = parse_id(&mut clone)?;
        if clone.eat(Token::Colon)? {
            // Peek (on further clones) for `func`/`async` without consuming,
            // then commit the lookahead position before parsing for real.
            if clone.clone().eat(Token::Func)? || clone.clone().eat(Token::Async)? {
                *tokens = clone;
                let ret = ExternKind::Func(id, Func::parse(tokens)?);
                tokens.expect_semicolon()?;
                return Ok(ret);
            }

            if clone.eat(Token::Interface)? {
                *tokens = clone;
                return Ok(ExternKind::Interface(id, Interface::parse_items(tokens)?));
            }
        }

        // Neither named form matched: re-parse from the original position
        // as a use-path such as `foo` or `ns:pkg/iface`.
        let ret = ExternKind::Path(UsePath::parse(tokens)?);
        tokens.expect_semicolon()?;
        Ok(ret)
    }

    /// Returns the span of the identifier naming this extern.
    fn span(&self) -> Span {
        match self {
            ExternKind::Interface(id, _) => id.span,
            ExternKind::Path(UsePath::Id(id)) => id.span,
            ExternKind::Path(UsePath::Package { name, .. }) => name.span,
            ExternKind::Func(id, _) => id.span,
        }
    }
}
525
/// An `interface <name> { ... }` declaration.
struct Interface<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    items: Vec<InterfaceItem<'a>>,
}
532
533impl<'a> Interface<'a> {
534 fn parse(
535 tokens: &mut Tokenizer<'a>,
536 docs: Docs<'a>,
537 attributes: Vec<Attribute<'a>>,
538 ) -> Result<Self> {
539 tokens.expect(Token::Interface)?;
540 let name = parse_id(tokens)?;
541 let items = Self::parse_items(tokens)?;
542 Ok(Interface {
543 docs,
544 attributes,
545 name,
546 items,
547 })
548 }
549
550 pub(super) fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<InterfaceItem<'a>>> {
551 tokens.expect(Token::LeftBrace)?;
552 let mut items = Vec::new();
553 loop {
554 let docs = parse_docs(tokens)?;
555 if tokens.eat(Token::RightBrace)? {
556 break;
557 }
558 let attributes = Attribute::parse_list(tokens)?;
559 items.push(InterfaceItem::parse(tokens, docs, attributes)?);
560 }
561 Ok(items)
562 }
563}
564
/// What kind of item a use/include path is expected to resolve to.
#[derive(Debug)]
pub enum WorldOrInterface {
    World,
    Interface,
    /// Not determinable at parse time (e.g. a top-level `use`).
    Unknown,
}
571
/// One item inside an `interface { ... }` body.
enum InterfaceItem<'a> {
    TypeDef(TypeDef<'a>),
    Func(NamedFunc<'a>),
    Use(Use<'a>),
}
577
/// A `use <path>.{ ... };` item importing names from another interface.
struct Use<'a> {
    attributes: Vec<Attribute<'a>>,
    from: UsePath<'a>,
    names: Vec<UseName<'a>>,
}
583
/// The target of a `use`: either a local identifier or a fully-qualified
/// `ns:pkg/item[@ver]` package path.
#[derive(Debug)]
enum UsePath<'a> {
    Id(Id<'a>),
    Package { id: PackageName<'a>, name: Id<'a> },
}
589
impl<'a> UsePath<'a> {
    /// Parses either a bare identifier (`foo`) or a fully-qualified package
    /// path (`ns:pkg/item@1.2.3`).
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
        let id = parse_id(tokens)?;
        if tokens.eat(Token::Colon)? {
            // `id` turned out to be the namespace of a package path.
            let namespace = id;
            let pkg_name = parse_id(tokens)?;
            tokens.expect(Token::Slash)?;
            let name = parse_id(tokens)?;
            let version = parse_opt_version(tokens)?;
            Ok(UsePath::Package {
                id: PackageName {
                    // Synthesized name: no doc comments attach here.
                    docs: Default::default(),
                    span: Span {
                        start: namespace.span.start,
                        end: pkg_name.span.end,
                    },
                    namespace,
                    name: pkg_name,
                    version,
                },
                name,
            })
        } else {
            Ok(UsePath::Id(id))
        }
    }

    /// Returns the final (item) name of this path.
    fn name(&self) -> &Id<'a> {
        match self {
            UsePath::Id(id) => id,
            UsePath::Package { name, .. } => name,
        }
    }
}
626
/// A single imported name, optionally renamed: `name [as alias]`.
struct UseName<'a> {
    name: Id<'a>,
    as_: Option<Id<'a>>,
}
631
impl<'a> Use<'a> {
    /// Parses `use <path>.{ a, b as c, ... };`.
    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
        tokens.expect(Token::Use)?;
        let from = UsePath::parse(tokens)?;
        tokens.expect(Token::Period)?;
        tokens.expect(Token::LeftBrace)?;

        let mut names = Vec::new();
        while !tokens.eat(Token::RightBrace)? {
            let mut name = UseName {
                name: parse_id(tokens)?,
                as_: None,
            };
            if tokens.eat(Token::As)? {
                name.as_ = Some(parse_id(tokens)?);
            }
            names.push(name);
            // The trailing comma is optional: when absent, only the
            // closing brace may follow the last name.
            if !tokens.eat(Token::Comma)? {
                tokens.expect(Token::RightBrace)?;
                break;
            }
        }
        tokens.expect_semicolon()?;
        Ok(Use {
            attributes,
            from,
            names,
        })
    }
}
662
/// An `include <path> [with { ... }];` item inside a world.
struct Include<'a> {
    from: UsePath<'a>,
    attributes: Vec<Attribute<'a>>,
    names: Vec<IncludeName<'a>>,
}

/// A rename entry in an include's `with` list: `name as alias`.
struct IncludeName<'a> {
    name: Id<'a>,
    as_: Id<'a>,
}
673
impl<'a> Include<'a> {
    /// Parses `include <path>;` or `include <path> with { a as b, ... }`.
    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
        tokens.expect(Token::Include)?;
        let from = UsePath::parse(tokens)?;

        let names = if tokens.eat(Token::With)? {
            // Each entry renames an included item: `name as alias`.
            parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |_docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::As)?;
                    let as_ = parse_id(tokens)?;
                    Ok(IncludeName { name, as_ })
                },
            )?
        } else {
            // Only the short form is semicolon-terminated; the `with`
            // list ends at its closing brace.
            tokens.expect_semicolon()?;
            Vec::new()
        };

        Ok(Include {
            attributes,
            from,
            names,
        })
    }
}
703
704#[derive(Debug, Clone)]
705pub struct Id<'a> {
706 name: &'a str,
707 span: Span,
708}
709
710impl<'a> From<&'a str> for Id<'a> {
711 fn from(s: &'a str) -> Id<'a> {
712 Id {
713 name: s.into(),
714 span: Span { start: 0, end: 0 },
715 }
716 }
717}
718
/// Doc comments collected immediately before an item, plus the source span
/// they cover.
#[derive(Debug, Clone)]
pub struct Docs<'a> {
    docs: Vec<Cow<'a, str>>,
    span: Span,
}

impl<'a> Default for Docs<'a> {
    // Manual impl because `Span` has no `Default`: an empty doc set uses a
    // zero span.
    fn default() -> Self {
        Self {
            docs: Default::default(),
            span: Span { start: 0, end: 0 },
        }
    }
}
733
/// A named type definition (`type`, `record`, `variant`, `enum`, `flags`,
/// or `resource`).
struct TypeDef<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    ty: Type<'a>,
}

/// A WIT type expression. Primitive variants carry the span of their
/// keyword; compound variants carry their own payload structs.
enum Type<'a> {
    Bool(Span),
    U8(Span),
    U16(Span),
    U32(Span),
    U64(Span),
    S8(Span),
    S16(Span),
    S32(Span),
    S64(Span),
    F32(Span),
    F64(Span),
    Char(Span),
    String(Span),
    /// A reference to a named (user-defined) type.
    Name(Id<'a>),
    List(List<'a>),
    Handle(Handle<'a>),
    Resource(Resource<'a>),
    Record(Record<'a>),
    Flags(Flags<'a>),
    Variant(Variant<'a>),
    Tuple(Tuple<'a>),
    Enum(Enum<'a>),
    Option(Option_<'a>),
    Result(Result_<'a>),
    Future(Future<'a>),
    Stream(Stream<'a>),
    ErrorContext(Span),
}

/// A handle to a resource: `own<r>` or `borrow<r>`.
enum Handle<'a> {
    Own { resource: Id<'a> },
    Borrow { resource: Id<'a> },
}
775
impl Handle<'_> {
    /// Returns the span of the resource name inside the handle.
    fn span(&self) -> Span {
        match self {
            Handle::Own { resource } | Handle::Borrow { resource } => resource.span,
        }
    }
}
783
/// The body of a `resource` type: its methods, statics, and constructor.
struct Resource<'a> {
    span: Span,
    funcs: Vec<ResourceFunc<'a>>,
}

/// One function declared inside a resource body.
enum ResourceFunc<'a> {
    Method(NamedFunc<'a>),
    Static(NamedFunc<'a>),
    Constructor(NamedFunc<'a>),
}
794
impl<'a> ResourceFunc<'a> {
    /// Parses one resource-body item: either `constructor(params...);` or a
    /// named `<id>: [static] func ...;` (method by default).
    fn parse(
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
        tokens: &mut Tokenizer<'a>,
    ) -> Result<Self> {
        match tokens.clone().next()? {
            Some((span, Token::Constructor)) => {
                tokens.expect(Token::Constructor)?;
                tokens.expect(Token::LeftParen)?;
                let params = parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok((name, ty))
                })?;
                tokens.expect_semicolon()?;
                // The constructor is modeled as a func with the synthetic
                // name "constructor" spanning the keyword; it has no
                // explicit result and cannot be async.
                Ok(ResourceFunc::Constructor(NamedFunc {
                    docs,
                    attributes,
                    name: Id {
                        span,
                        name: "constructor",
                    },
                    func: Func {
                        span,
                        async_: false,
                        params,
                        result: None,
                    },
                }))
            }
            Some((_span, Token::Id | Token::ExplicitId)) => {
                let name = parse_id(tokens)?;
                tokens.expect(Token::Colon)?;
                // `static` distinguishes statics from methods; pick the
                // enum constructor accordingly.
                let ctor = if tokens.eat(Token::Static)? {
                    ResourceFunc::Static
                } else {
                    ResourceFunc::Method
                };
                let func = Func::parse(tokens)?;
                tokens.expect_semicolon()?;
                Ok(ctor(NamedFunc {
                    docs,
                    attributes,
                    name,
                    func,
                }))
            }
            other => Err(err_expected(tokens, "`constructor` or identifier", other).into()),
        }
    }

    /// Returns the underlying named function regardless of variant.
    fn named_func(&self) -> &NamedFunc<'a> {
        use ResourceFunc::*;
        match self {
            Method(f) | Static(f) | Constructor(f) => f,
        }
    }
}
855
/// The body of a `record` type.
struct Record<'a> {
    span: Span,
    fields: Vec<Field<'a>>,
}

/// One `name: ty` field of a record.
struct Field<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Type<'a>,
}

/// The body of a `flags` type.
struct Flags<'a> {
    span: Span,
    flags: Vec<Flag<'a>>,
}

/// One named flag.
struct Flag<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}

/// The body of a `variant` type.
struct Variant<'a> {
    span: Span,
    cases: Vec<Case<'a>>,
}

/// One variant case with an optional payload type.
struct Case<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Option<Type<'a>>,
}

/// The body of an `enum` type.
struct Enum<'a> {
    span: Span,
    cases: Vec<EnumCase<'a>>,
}

/// One payload-less enum case.
struct EnumCase<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}

/// An `option<T>` type.
struct Option_<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

/// A `list<T>` type.
struct List<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

/// A `future` or `future<T>` type.
struct Future<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}

/// A `tuple<T, U, ...>` type.
struct Tuple<'a> {
    span: Span,
    types: Vec<Type<'a>>,
}

/// A `result`, `result<T>`, `result<T, E>`, or `result<_, E>` type.
struct Result_<'a> {
    span: Span,
    ok: Option<Box<Type<'a>>>,
    err: Option<Box<Type<'a>>>,
}

/// A `stream` or `stream<T>` type.
struct Stream<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}
928
/// A function bound to a name (`<id>: <func-type>`).
struct NamedFunc<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    func: Func<'a>,
}

/// `(name, type)` pairs of a function's parameter list.
type ParamList<'a> = Vec<(Id<'a>, Type<'a>)>;

/// An unnamed function type: parameters plus an optional result.
struct Func<'a> {
    /// Span of the `func` keyword.
    span: Span,
    /// Whether the function was declared `async`.
    async_: bool,
    params: ParamList<'a>,
    result: Option<Type<'a>>,
}
944
945impl<'a> Func<'a> {
946 fn parse(tokens: &mut Tokenizer<'a>) -> Result<Func<'a>> {
947 fn parse_params<'a>(tokens: &mut Tokenizer<'a>, left_paren: bool) -> Result<ParamList<'a>> {
948 if left_paren {
949 tokens.expect(Token::LeftParen)?;
950 };
951 parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
952 let name = parse_id(tokens)?;
953 tokens.expect(Token::Colon)?;
954 let ty = Type::parse(tokens)?;
955 Ok((name, ty))
956 })
957 }
958
959 let async_ = tokens.eat(Token::Async)?;
960 let span = tokens.expect(Token::Func)?;
961 let params = parse_params(tokens, true)?;
962 let result = if tokens.eat(Token::RArrow)? {
963 let ty = Type::parse(tokens)?;
964 Some(ty)
965 } else {
966 None
967 };
968 Ok(Func {
969 span,
970 async_,
971 params,
972 result,
973 })
974 }
975}
976
impl<'a> InterfaceItem<'a> {
    /// Dispatches on the next token (peeked via a cloned tokenizer) to
    /// parse one item inside an interface body: a type definition, a named
    /// function, or a `use`.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<InterfaceItem<'a>> {
        match tokens.clone().next()? {
            Some((_span, Token::Type)) => {
                TypeDef::parse(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Flags)) => {
                TypeDef::parse_flags(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Enum)) => {
                TypeDef::parse_enum(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Variant)) => {
                TypeDef::parse_variant(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Resource)) => {
                TypeDef::parse_resource(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Record)) => {
                TypeDef::parse_record(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            // A bare identifier starts a named function declaration.
            Some((_span, Token::Id)) | Some((_span, Token::ExplicitId)) => {
                NamedFunc::parse(tokens, docs, attributes).map(InterfaceItem::Func)
            }
            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(InterfaceItem::Use),
            other => Err(err_expected(tokens, "`type`, `resource` or `func`", other).into()),
        }
    }
}
1010
impl<'a> TypeDef<'a> {
    /// Parses a type alias: `type <id> = <ty>;`.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Type)?;
        let name = parse_id(tokens)?;
        tokens.expect(Token::Equals)?;
        let ty = Type::parse(tokens)?;
        tokens.expect_semicolon()?;
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `flags <id> { flag, ... }`.
    fn parse_flags(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Flags)?;
        let name = parse_id(tokens)?;
        let ty = Type::Flags(Flags {
            span: name.span,
            flags: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(Flag { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `resource <id>;` or `resource <id> { funcs... }`.
    fn parse_resource(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Resource)?;
        let name = parse_id(tokens)?;
        let mut funcs = Vec::new();
        if tokens.eat(Token::LeftBrace)? {
            while !tokens.eat(Token::RightBrace)? {
                let docs = parse_docs(tokens)?;
                let attributes = Attribute::parse_list(tokens)?;
                funcs.push(ResourceFunc::parse(docs, attributes, tokens)?);
            }
        } else {
            // The braceless form must still be terminated by a semicolon.
            tokens.expect_semicolon()?;
        }
        let ty = Type::Resource(Resource {
            span: name.span,
            funcs,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `record <id> { field: ty, ... }`.
    fn parse_record(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Record)?;
        let name = parse_id(tokens)?;
        let ty = Type::Record(Record {
            span: name.span,
            fields: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok(Field { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `variant <id> { case, case(ty), ... }`.
    fn parse_variant(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Variant)?;
        let name = parse_id(tokens)?;
        let ty = Type::Variant(Variant {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    // A parenthesized payload type is optional per case.
                    let ty = if tokens.eat(Token::LeftParen)? {
                        let ty = Type::parse(tokens)?;
                        tokens.expect(Token::RightParen)?;
                        Some(ty)
                    } else {
                        None
                    };
                    Ok(Case { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses `enum <id> { case, ... }`.
    fn parse_enum(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Enum)?;
        let name = parse_id(tokens)?;
        let ty = Type::Enum(Enum {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(EnumCase { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }
}
1176
1177impl<'a> NamedFunc<'a> {
1178 fn parse(
1179 tokens: &mut Tokenizer<'a>,
1180 docs: Docs<'a>,
1181 attributes: Vec<Attribute<'a>>,
1182 ) -> Result<Self> {
1183 let name = parse_id(tokens)?;
1184 tokens.expect(Token::Colon)?;
1185 let func = Func::parse(tokens)?;
1186 tokens.expect_semicolon()?;
1187 Ok(NamedFunc {
1188 docs,
1189 attributes,
1190 name,
1191 func,
1192 })
1193 }
1194}
1195
/// Parses a plain or `%`-escaped identifier into an `Id` carrying its span.
fn parse_id<'a>(tokens: &mut Tokenizer<'a>) -> Result<Id<'a>> {
    match tokens.next()? {
        Some((span, Token::Id)) => Ok(Id {
            name: tokens.parse_id(span)?,
            span,
        }),
        // `%`-prefixed identifiers escape keywords.
        Some((span, Token::ExplicitId)) => Ok(Id {
            name: tokens.parse_explicit_id(span)?,
            span,
        }),
        other => Err(err_expected(tokens, "an identifier or string", other).into()),
    }
}
1209
1210fn parse_opt_version(tokens: &mut Tokenizer<'_>) -> Result<Option<(Span, Version)>> {
1211 if tokens.eat(Token::At)? {
1212 parse_version(tokens).map(Some)
1213 } else {
1214 Ok(None)
1215 }
1216}
1217
/// Parses a semver version after `@`: `major.minor.patch`, optionally
/// followed by `-prerelease` and/or `+build` suffixes.
///
/// The tokenizer has no dedicated version token, so the version is
/// consumed token-by-token and the resulting source span is handed to
/// `semver::Version::parse` for real validation.
fn parse_version(tokens: &mut Tokenizer<'_>) -> Result<(Span, Version)> {
    let start = tokens.expect(Token::Integer)?.start;
    tokens.expect(Token::Period)?;
    tokens.expect(Token::Integer)?;
    tokens.expect(Token::Period)?;
    let end = tokens.expect(Token::Integer)?.end;
    let mut span = Span { start, end };
    // Optional pre-release (`-...`) then build-metadata (`+...`) suffixes.
    eat_ids(tokens, Token::Minus, &mut span)?;
    eat_ids(tokens, Token::Plus, &mut span)?;
    let string = tokens.get_span(span);
    let version = Version::parse(string).map_err(|e| Error::new(span, e.to_string()))?;
    return Ok((span, version));

    /// Consumes a `prefix`-introduced dot-separated run of ids/integers,
    /// extending `end` to cover it. Backtracks (via tokenizer clones) at
    /// the first token that cannot belong to the suffix.
    fn eat_ids(tokens: &mut Tokenizer<'_>, prefix: Token, end: &mut Span) -> Result<()> {
        if !tokens.eat(prefix)? {
            return Ok(());
        }
        loop {
            let mut clone = tokens.clone();
            match clone.next()? {
                Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                    end.end = span.end;
                    *tokens = clone;
                }
                // A `.` only continues the suffix when followed by another
                // id/integer segment; otherwise it's left unconsumed.
                Some((_span, Token::Period)) => match clone.next()? {
                    Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                        end.end = span.end;
                        *tokens = clone;
                    }
                    _ => break Ok(()),
                },
                _ => break Ok(()),
            }
        }
    }
}
1307
/// Collects the comments immediately preceding the next real token,
/// leaving the tokenizer positioned at that token.
fn parse_docs<'a>(tokens: &mut Tokenizer<'a>) -> Result<Docs<'a>> {
    let mut docs = Docs::default();
    // Scan raw tokens (which include whitespace and comments) on a clone,
    // committing the main tokenizer only past the trivia actually consumed.
    let mut clone = tokens.clone();
    let mut started = false;
    while let Some((span, token)) = clone.next_raw()? {
        match token {
            Token::Whitespace => {}
            Token::Comment => {
                let comment = tokens.get_span(span);
                if !started {
                    // The docs span starts at the first comment seen.
                    docs.span.start = span.start;
                    started = true;
                }
                // Exclude the comment's trailing whitespace (e.g. its
                // newline) from the recorded span.
                let trailing_ws = comment
                    .bytes()
                    .rev()
                    .take_while(|ch| ch.is_ascii_whitespace())
                    .count();
                docs.span.end = span.end - (trailing_ws as u32);
                docs.docs.push(comment.into());
            }
            _ => break,
        };
        *tokens = clone.clone();
    }
    Ok(docs)
}
1335
1336impl<'a> Type<'a> {
1337 fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
1338 match tokens.next()? {
1339 Some((span, Token::U8)) => Ok(Type::U8(span)),
1340 Some((span, Token::U16)) => Ok(Type::U16(span)),
1341 Some((span, Token::U32)) => Ok(Type::U32(span)),
1342 Some((span, Token::U64)) => Ok(Type::U64(span)),
1343 Some((span, Token::S8)) => Ok(Type::S8(span)),
1344 Some((span, Token::S16)) => Ok(Type::S16(span)),
1345 Some((span, Token::S32)) => Ok(Type::S32(span)),
1346 Some((span, Token::S64)) => Ok(Type::S64(span)),
1347 Some((span, Token::F32)) => Ok(Type::F32(span)),
1348 Some((span, Token::F64)) => Ok(Type::F64(span)),
1349 Some((span, Token::Char)) => Ok(Type::Char(span)),
1350
1351 Some((span, Token::Tuple)) => {
1353 let types = parse_list(
1354 tokens,
1355 Token::LessThan,
1356 Token::GreaterThan,
1357 |_docs, tokens| Type::parse(tokens),
1358 )?;
1359 Ok(Type::Tuple(Tuple { span, types }))
1360 }
1361
1362 Some((span, Token::Bool)) => Ok(Type::Bool(span)),
1363 Some((span, Token::String_)) => Ok(Type::String(span)),
1364
1365 Some((span, Token::List)) => {
1367 tokens.expect(Token::LessThan)?;
1368 let ty = Type::parse(tokens)?;
1369 tokens.expect(Token::GreaterThan)?;
1370 Ok(Type::List(List {
1371 span,
1372 ty: Box::new(ty),
1373 }))
1374 }
1375
1376 Some((span, Token::Option_)) => {
1378 tokens.expect(Token::LessThan)?;
1379 let ty = Type::parse(tokens)?;
1380 tokens.expect(Token::GreaterThan)?;
1381 Ok(Type::Option(Option_ {
1382 span,
1383 ty: Box::new(ty),
1384 }))
1385 }
1386
1387 Some((span, Token::Result_)) => {
1392 let mut ok = None;
1393 let mut err = None;
1394
1395 if tokens.eat(Token::LessThan)? {
1396 if tokens.eat(Token::Underscore)? {
1397 tokens.expect(Token::Comma)?;
1398 err = Some(Box::new(Type::parse(tokens)?));
1399 } else {
1400 ok = Some(Box::new(Type::parse(tokens)?));
1401 if tokens.eat(Token::Comma)? {
1402 err = Some(Box::new(Type::parse(tokens)?));
1403 }
1404 };
1405 tokens.expect(Token::GreaterThan)?;
1406 };
1407 Ok(Type::Result(Result_ { span, ok, err }))
1408 }
1409
1410 Some((span, Token::Future)) => {
1413 let mut ty = None;
1414
1415 if tokens.eat(Token::LessThan)? {
1416 ty = Some(Box::new(Type::parse(tokens)?));
1417 tokens.expect(Token::GreaterThan)?;
1418 };
1419 Ok(Type::Future(Future { span, ty }))
1420 }
1421
1422 Some((span, Token::Stream)) => {
1425 let mut ty = None;
1426
1427 if tokens.eat(Token::LessThan)? {
1428 ty = Some(Box::new(Type::parse(tokens)?));
1429 tokens.expect(Token::GreaterThan)?;
1430 };
1431 Ok(Type::Stream(Stream { span, ty }))
1432 }
1433
1434 Some((span, Token::ErrorContext)) => Ok(Type::ErrorContext(span)),
1436
1437 Some((_span, Token::Own)) => {
1439 tokens.expect(Token::LessThan)?;
1440 let resource = parse_id(tokens)?;
1441 tokens.expect(Token::GreaterThan)?;
1442 Ok(Type::Handle(Handle::Own { resource }))
1443 }
1444
1445 Some((_span, Token::Borrow)) => {
1447 tokens.expect(Token::LessThan)?;
1448 let resource = parse_id(tokens)?;
1449 tokens.expect(Token::GreaterThan)?;
1450 Ok(Type::Handle(Handle::Borrow { resource }))
1451 }
1452
1453 Some((span, Token::Id)) => Ok(Type::Name(Id {
1455 name: tokens.parse_id(span)?.into(),
1456 span,
1457 })),
1458 Some((span, Token::ExplicitId)) => Ok(Type::Name(Id {
1460 name: tokens.parse_explicit_id(span)?.into(),
1461 span,
1462 })),
1463
1464 other => Err(err_expected(tokens, "a type", other).into()),
1465 }
1466 }
1467
1468 fn span(&self) -> Span {
1469 match self {
1470 Type::Bool(span)
1471 | Type::U8(span)
1472 | Type::U16(span)
1473 | Type::U32(span)
1474 | Type::U64(span)
1475 | Type::S8(span)
1476 | Type::S16(span)
1477 | Type::S32(span)
1478 | Type::S64(span)
1479 | Type::F32(span)
1480 | Type::F64(span)
1481 | Type::Char(span)
1482 | Type::String(span)
1483 | Type::ErrorContext(span) => *span,
1484 Type::Name(id) => id.span,
1485 Type::List(l) => l.span,
1486 Type::Handle(h) => h.span(),
1487 Type::Resource(r) => r.span,
1488 Type::Record(r) => r.span,
1489 Type::Flags(f) => f.span,
1490 Type::Variant(v) => v.span,
1491 Type::Tuple(t) => t.span,
1492 Type::Enum(e) => e.span,
1493 Type::Option(o) => o.span,
1494 Type::Result(r) => r.span,
1495 Type::Future(f) => f.span,
1496 Type::Stream(s) => s.span,
1497 }
1498 }
1499}
1500
/// Parses a delimiter-enclosed, comma-separated list of items.
///
/// Consumes the `start` token, then repeatedly invokes `parse` (handing it
/// any preceding doc comments) until `end` is consumed; see
/// `parse_list_trailer` for the comma/terminator handling.
fn parse_list<'a, T>(
    tokens: &mut Tokenizer<'a>,
    start: Token,
    end: Token,
    parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
) -> Result<Vec<T>> {
    tokens.expect(start)?;
    parse_list_trailer(tokens, end, parse)
}
1510
1511fn parse_list_trailer<'a, T>(
1512 tokens: &mut Tokenizer<'a>,
1513 end: Token,
1514 mut parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
1515) -> Result<Vec<T>> {
1516 let mut items = Vec::new();
1517 loop {
1518 let docs = parse_docs(tokens)?;
1520
1521 if tokens.eat(end)? {
1523 break;
1524 }
1525
1526 let item = parse(docs, tokens)?;
1527 items.push(item);
1528
1529 if !tokens.eat(Token::Comma)? {
1532 tokens.expect(end)?;
1533 break;
1534 }
1535 }
1536 Ok(items)
1537}
1538
1539fn err_expected(
1540 tokens: &Tokenizer<'_>,
1541 expected: &'static str,
1542 found: Option<(Span, Token)>,
1543) -> Error {
1544 match found {
1545 Some((span, token)) => Error::new(
1546 span,
1547 format!("expected {}, found {}", expected, token.describe()),
1548 ),
1549 None => Error::new(
1550 tokens.eof_span(),
1551 format!("expected {}, found eof", expected),
1552 ),
1553 }
1554}
1555
/// An item annotation parsed from `@name(key = value)` syntax.
enum Attribute<'a> {
    /// `@since(version = <semver>)`
    Since { span: Span, version: Version },
    /// `@unstable(feature = <id>)`
    Unstable { span: Span, feature: Id<'a> },
    /// `@deprecated(version = <semver>)`
    Deprecated { span: Span, version: Version },
}
1561
1562impl<'a> Attribute<'a> {
1563 fn parse_list(tokens: &mut Tokenizer<'a>) -> Result<Vec<Attribute<'a>>> {
1564 let mut ret = Vec::new();
1565 while tokens.eat(Token::At)? {
1566 let id = parse_id(tokens)?;
1567 let attr = match id.name {
1568 "since" => {
1569 tokens.expect(Token::LeftParen)?;
1570 eat_id(tokens, "version")?;
1571 tokens.expect(Token::Equals)?;
1572 let (_span, version) = parse_version(tokens)?;
1573 tokens.expect(Token::RightParen)?;
1574 Attribute::Since {
1575 span: id.span,
1576 version,
1577 }
1578 }
1579 "unstable" => {
1580 tokens.expect(Token::LeftParen)?;
1581 eat_id(tokens, "feature")?;
1582 tokens.expect(Token::Equals)?;
1583 let feature = parse_id(tokens)?;
1584 tokens.expect(Token::RightParen)?;
1585 Attribute::Unstable {
1586 span: id.span,
1587 feature,
1588 }
1589 }
1590 "deprecated" => {
1591 tokens.expect(Token::LeftParen)?;
1592 eat_id(tokens, "version")?;
1593 tokens.expect(Token::Equals)?;
1594 let (_span, version) = parse_version(tokens)?;
1595 tokens.expect(Token::RightParen)?;
1596 Attribute::Deprecated {
1597 span: id.span,
1598 version,
1599 }
1600 }
1601 other => {
1602 bail!(Error::new(id.span, format!("unknown attribute `{other}`"),))
1603 }
1604 };
1605 ret.push(attr);
1606 }
1607 Ok(ret)
1608 }
1609
1610 fn span(&self) -> Span {
1611 match self {
1612 Attribute::Since { span, .. }
1613 | Attribute::Unstable { span, .. }
1614 | Attribute::Deprecated { span, .. } => *span,
1615 }
1616 }
1617}
1618
1619fn eat_id(tokens: &mut Tokenizer<'_>, expected: &str) -> Result<Span> {
1620 let id = parse_id(tokens)?;
1621 if id.name != expected {
1622 bail!(Error::new(
1623 id.span,
1624 format!("expected `{expected}`, found `{}`", id.name),
1625 ));
1626 }
1627 Ok(id.span)
1628}
1629
/// A collection of source texts laid out in a single global `u32` byte-offset
/// space, used both for parsing and for rendering error locations.
#[derive(Clone, Default)]
pub struct SourceMap {
    /// All sources pushed so far, in push order; each records its own
    /// starting offset within the global space.
    sources: Vec<Source>,
    /// Global offset at which the next pushed source will begin.
    offset: u32,
    /// Optional flag forwarded to `Tokenizer::new`; `None` leaves the
    /// tokenizer's default behavior.
    require_f32_f64: Option<bool>,
}
1640
/// One source text plus its position in `SourceMap`'s global offset space.
#[derive(Clone)]
struct Source {
    /// Global byte offset at which this source's contents begin.
    offset: u32,
    /// Path the contents were read from (or nominally assigned); used for
    /// deterministic ordering and in diagnostics.
    path: PathBuf,
    /// The source text; `SourceMap::push` always appends a trailing newline.
    contents: String,
}
1647
1648impl SourceMap {
1649 pub fn new() -> SourceMap {
1651 SourceMap::default()
1652 }
1653
1654 #[doc(hidden)] pub fn set_require_f32_f64(&mut self, enable: bool) {
1656 self.require_f32_f64 = Some(enable);
1657 }
1658
1659 pub fn push_file(&mut self, path: &Path) -> Result<()> {
1662 let contents = std::fs::read_to_string(path)
1663 .with_context(|| format!("failed to read file {path:?}"))?;
1664 self.push(path, contents);
1665 Ok(())
1666 }
1667
1668 pub fn push(&mut self, path: &Path, contents: impl Into<String>) {
1676 let mut contents = contents.into();
1677 contents.push('\n');
1683 let new_offset = self.offset + u32::try_from(contents.len()).unwrap();
1684 self.sources.push(Source {
1685 offset: self.offset,
1686 path: path.to_path_buf(),
1687 contents,
1688 });
1689 self.offset = new_offset;
1690 }
1691
1692 pub fn parse(self) -> Result<UnresolvedPackageGroup> {
1695 let mut nested = Vec::new();
1696 let main = self.rewrite_error(|| {
1697 let mut resolver = Resolver::default();
1698 let mut srcs = self.sources.iter().collect::<Vec<_>>();
1699 srcs.sort_by_key(|src| &src.path);
1700
1701 for src in srcs {
1705 let mut tokens = Tokenizer::new(
1706 &src.contents[..src.contents.len() - 1],
1709 src.offset,
1710 self.require_f32_f64,
1711 )
1712 .with_context(|| format!("failed to tokenize path: {}", src.path.display()))?;
1713 let mut file = PackageFile::parse(&mut tokens)?;
1714
1715 for item in mem::take(&mut file.decl_list.items) {
1724 match item {
1725 AstItem::Package(nested_pkg) => {
1726 let mut resolve = Resolver::default();
1727 resolve.push(nested_pkg).with_context(|| {
1728 format!(
1729 "failed to handle nested package in: {}",
1730 src.path.display()
1731 )
1732 })?;
1733
1734 nested.push(resolve.resolve()?);
1735 }
1736 other => file.decl_list.items.push(other),
1737 }
1738 }
1739
1740 resolver.push(file).with_context(|| {
1743 format!("failed to start resolving path: {}", src.path.display())
1744 })?;
1745 }
1746 Ok(resolver.resolve()?)
1747 })?;
1748 Ok(UnresolvedPackageGroup {
1749 main,
1750 nested,
1751 source_map: self,
1752 })
1753 }
1754
1755 pub(crate) fn rewrite_error<F, T>(&self, f: F) -> Result<T>
1756 where
1757 F: FnOnce() -> Result<T>,
1758 {
1759 let mut err = match f() {
1760 Ok(t) => return Ok(t),
1761 Err(e) => e,
1762 };
1763 if let Some(parse) = err.downcast_mut::<Error>() {
1764 if parse.highlighted.is_none() {
1765 let msg = self.highlight_err(parse.span.start, Some(parse.span.end), &parse.msg);
1766 parse.highlighted = Some(msg);
1767 }
1768 }
1769 if let Some(_) = err.downcast_mut::<Error>() {
1770 return Err(err);
1771 }
1772 if let Some(notfound) = err.downcast_mut::<PackageNotFoundError>() {
1773 if notfound.highlighted.is_none() {
1774 let msg = self.highlight_err(
1775 notfound.span.start,
1776 Some(notfound.span.end),
1777 &format!("{notfound}"),
1778 );
1779 notfound.highlighted = Some(msg);
1780 }
1781 }
1782 if let Some(_) = err.downcast_mut::<PackageNotFoundError>() {
1783 return Err(err);
1784 }
1785
1786 if let Some(lex) = err.downcast_ref::<lex::Error>() {
1787 let pos = match lex {
1788 lex::Error::Unexpected(at, _)
1789 | lex::Error::UnterminatedComment(at)
1790 | lex::Error::Wanted { at, .. }
1791 | lex::Error::InvalidCharInId(at, _)
1792 | lex::Error::IdPartEmpty(at)
1793 | lex::Error::InvalidEscape(at, _) => *at,
1794 };
1795 let msg = self.highlight_err(pos, None, lex);
1796 bail!("{msg}")
1797 }
1798
1799 if let Some(sort) = err.downcast_mut::<toposort::Error>() {
1800 if sort.highlighted().is_none() {
1801 let span = match sort {
1802 toposort::Error::NonexistentDep { span, .. }
1803 | toposort::Error::Cycle { span, .. } => *span,
1804 };
1805 let highlighted = self.highlight_err(span.start, Some(span.end), &sort);
1806 sort.set_highlighted(highlighted);
1807 }
1808 }
1809
1810 Err(err)
1811 }
1812
1813 fn highlight_err(&self, start: u32, end: Option<u32>, err: impl fmt::Display) -> String {
1814 let src = self.source_for_offset(start);
1815 let start = src.to_relative_offset(start);
1816 let end = end.map(|end| src.to_relative_offset(end));
1817 let (line, col) = src.linecol(start);
1818 let snippet = src.contents.lines().nth(line).unwrap_or("");
1819 let mut msg = format!(
1820 "\
1821{err}
1822 --> {file}:{line}:{col}
1823 |
1824 {line:4} | {snippet}
1825 | {marker:>0$}",
1826 col + 1,
1827 file = src.path.display(),
1828 line = line + 1,
1829 col = col + 1,
1830 marker = "^",
1831 );
1832 if let Some(end) = end {
1833 if let Some(s) = src.contents.get(start..end) {
1834 for _ in s.chars().skip(1) {
1835 msg.push('-');
1836 }
1837 }
1838 }
1839 return msg;
1840 }
1841
1842 pub(crate) fn render_location(&self, span: Span) -> String {
1843 let src = self.source_for_offset(span.start);
1844 let start = src.to_relative_offset(span.start);
1845 let (line, col) = src.linecol(start);
1846 format!(
1847 "{file}:{line}:{col}",
1848 file = src.path.display(),
1849 line = line + 1,
1850 col = col + 1,
1851 )
1852 }
1853
1854 fn source_for_offset(&self, start: u32) -> &Source {
1855 let i = match self.sources.binary_search_by_key(&start, |src| src.offset) {
1856 Ok(i) => i,
1857 Err(i) => i - 1,
1858 };
1859 &self.sources[i]
1860 }
1861
1862 pub fn source_files(&self) -> impl Iterator<Item = &Path> {
1864 self.sources.iter().map(|src| src.path.as_path())
1865 }
1866}
1867
1868impl Source {
1869 fn to_relative_offset(&self, offset: u32) -> usize {
1870 usize::try_from(offset - self.offset).unwrap()
1871 }
1872
1873 fn linecol(&self, relative_offset: usize) -> (usize, usize) {
1874 let mut cur = 0;
1875 for (i, line) in self.contents.split_terminator('\n').enumerate() {
1879 if cur + line.len() + 1 > relative_offset {
1880 return (i, relative_offset - cur);
1881 }
1882 cur += line.len() + 1;
1883 }
1884 (self.contents.lines().count(), 0)
1885 }
1886}
1887
/// The result of parsing a use-path string with [`parse_use_path`].
pub enum ParsedUsePath {
    /// A bare identifier name.
    Name(String),
    /// A name qualified by the package it belongs to.
    Package(crate::PackageName, String),
}
1892
1893pub fn parse_use_path(s: &str) -> Result<ParsedUsePath> {
1894 let mut tokens = Tokenizer::new(s, 0, None)?;
1895 let path = UsePath::parse(&mut tokens)?;
1896 if tokens.next()?.is_some() {
1897 bail!("trailing tokens in path specifier");
1898 }
1899 Ok(match path {
1900 UsePath::Id(id) => ParsedUsePath::Name(id.name.to_string()),
1901 UsePath::Package { id, name } => {
1902 ParsedUsePath::Package(id.package_name(), name.name.to_string())
1903 }
1904 })
1905}