1use crate::{Error, PackageNotFoundError, UnresolvedPackageGroup};
2use anyhow::{bail, Context, Result};
3use lex::{Span, Token, Tokenizer};
4use semver::Version;
5use std::borrow::Cow;
6use std::fmt;
7use std::mem;
8use std::path::{Path, PathBuf};
9
10pub mod lex;
11
12pub use resolve::Resolver;
13mod resolve;
14pub mod toposort;
15
16pub use lex::validate_id;
17
18/// Representation of a single WIT `*.wit` file and nested packages.
/// Representation of a single WIT `*.wit` file and nested packages.
struct PackageFile<'a> {
    /// Optional `package foo:bar;` header
    package_id: Option<PackageName<'a>>,
    /// Other AST items.
    decl_list: DeclList<'a>,
}
25
26impl<'a> PackageFile<'a> {
27 /// Parse a standalone file represented by `tokens`.
28 ///
29 /// This will optionally start with `package foo:bar;` and then will have a
30 /// list of ast items after it.
31 fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
32 let mut package_name_tokens_peek = tokens.clone();
33 let docs = parse_docs(&mut package_name_tokens_peek)?;
34
35 // Parse `package foo:bar;` but throw it out if it's actually
36 // `package foo:bar { ... }` since that's an ast item instead.
37 let package_id = if package_name_tokens_peek.eat(Token::Package)? {
38 let name = PackageName::parse(&mut package_name_tokens_peek, docs)?;
39 if package_name_tokens_peek.eat(Token::Semicolon)? {
40 *tokens = package_name_tokens_peek;
41 Some(name)
42 } else {
43 None
44 }
45 } else {
46 None
47 };
48 let decl_list = DeclList::parse_until(tokens, None)?;
49 Ok(PackageFile {
50 package_id,
51 decl_list,
52 })
53 }
54
55 /// Parse a nested package of the form `package foo:bar { ... }`
56 fn parse_nested(
57 tokens: &mut Tokenizer<'a>,
58 docs: Docs<'a>,
59 attributes: Vec<Attribute<'a>>,
60 ) -> Result<Self> {
61 let span = tokens.expect(Token::Package)?;
62 if !attributes.is_empty() {
63 bail!(Error::new(
64 span,
65 format!("cannot place attributes on nested packages"),
66 ));
67 }
68 let package_id = PackageName::parse(tokens, docs)?;
69 tokens.expect(Token::LeftBrace)?;
70 let decl_list = DeclList::parse_until(tokens, Some(Token::RightBrace))?;
71 Ok(PackageFile {
72 package_id: Some(package_id),
73 decl_list,
74 })
75 }
76}
77
78/// Stores all of the declarations in a package's scope. In AST terms, this
79/// means everything except the `package` declaration that demarcates a package
/// scope. In the traditional implicit format, these are all of the
/// non-`package` declarations in the file:
82///
83/// ```wit
84/// package foo:name;
85///
86/// /* START DECL LIST */
87/// // Some comment...
88/// interface i {}
89/// world w {}
90/// /* END DECL LIST */
91/// ```
92///
93/// In the nested package style, a [`DeclList`] is everything inside of each
94/// `package` element's brackets:
95///
96/// ```wit
97/// package foo:name {
98/// /* START FIRST DECL LIST */
99/// // Some comment...
100/// interface i {}
101/// world w {}
102/// /* END FIRST DECL LIST */
103/// }
104///
105/// package bar:name {
106/// /* START SECOND DECL LIST */
107/// // Some comment...
108/// interface i {}
109/// world w {}
110/// /* END SECOND DECL LIST */
111/// }
112/// ```
#[derive(Default)]
pub struct DeclList<'a> {
    // All declarations, in source order.
    items: Vec<AstItem<'a>>,
}
117
impl<'a> DeclList<'a> {
    /// Parses AST items until the `end` token is eaten, or until end-of-input
    /// when `end` is `None` (the non-nested, whole-file case).
    fn parse_until(tokens: &mut Tokenizer<'a>, end: Option<Token>) -> Result<DeclList<'a>> {
        let mut items = Vec::new();
        // Docs are parsed ahead of each item (and once up front) so trailing
        // comments at the end of the list are consumed as well.
        let mut docs = parse_docs(tokens)?;
        loop {
            match end {
                Some(end) => {
                    if tokens.eat(end)? {
                        break;
                    }
                }
                None => {
                    if tokens.clone().next()?.is_none() {
                        break;
                    }
                }
            }
            items.push(AstItem::parse(tokens, docs)?);
            docs = parse_docs(tokens)?;
        }
        Ok(DeclList { items })
    }

    /// Invokes `f` for every `use`/`include`/extern path reachable from this
    /// declaration list, recursing into nested packages.
    ///
    /// `f` receives the enclosing item's name (when known), the path, the
    /// imported names (for `use`), and what kind of item the path refers to.
    fn for_each_path<'b>(
        &'b self,
        f: &mut dyn FnMut(
            Option<&'b Id<'a>>,
            &'b UsePath<'a>,
            Option<&'b [UseName<'a>]>,
            WorldOrInterface,
        ) -> Result<()>,
    ) -> Result<()> {
        for item in self.items.iter() {
            match item {
                AstItem::World(world) => {
                    // Visit imports here first before exports to help preserve
                    // round-tripping of documents because printing a world puts
                    // imports first but textually they can be listed with
                    // exports first.
                    let mut imports = Vec::new();
                    let mut exports = Vec::new();
                    for item in world.items.iter() {
                        match item {
                            WorldItem::Use(u) => {
                                f(None, &u.from, Some(&u.names), WorldOrInterface::Interface)?
                            }
                            WorldItem::Include(i) => {
                                f(Some(&world.name), &i.from, None, WorldOrInterface::World)?
                            }
                            WorldItem::Type(_) => {}
                            WorldItem::Import(Import { kind, .. }) => imports.push(kind),
                            WorldItem::Export(Export { kind, .. }) => exports.push(kind),
                        }
                    }

                    let mut visit_kind = |kind: &'b ExternKind<'a>| match kind {
                        ExternKind::Interface(_, items) => {
                            for item in items {
                                match item {
                                    InterfaceItem::Use(u) => f(
                                        None,
                                        &u.from,
                                        Some(&u.names),
                                        WorldOrInterface::Interface,
                                    )?,
                                    _ => {}
                                }
                            }
                            Ok(())
                        }
                        ExternKind::Path(path) => f(None, path, None, WorldOrInterface::Interface),
                        ExternKind::Func(..) => Ok(()),
                    };

                    for kind in imports {
                        visit_kind(kind)?;
                    }
                    for kind in exports {
                        visit_kind(kind)?;
                    }
                }
                AstItem::Interface(i) => {
                    for item in i.items.iter() {
                        match item {
                            InterfaceItem::Use(u) => f(
                                Some(&i.name),
                                &u.from,
                                Some(&u.names),
                                WorldOrInterface::Interface,
                            )?,
                            _ => {}
                        }
                    }
                }
                AstItem::Use(u) => {
                    // At the top-level, we don't know if this is a world or an interface
                    // It is up to the resolver to decide how to handle this ambiguity.
                    f(None, &u.item, None, WorldOrInterface::Unknown)?;
                }

                AstItem::Package(pkg) => pkg.decl_list.for_each_path(f)?,
            }
        }
        Ok(())
    }
}
224
/// A single top-level item in a declaration list.
enum AstItem<'a> {
    Interface(Interface<'a>),
    World(World<'a>),
    Use(ToplevelUse<'a>),
    /// A nested `package foo:bar { ... }` block.
    Package(PackageFile<'a>),
}
231
232impl<'a> AstItem<'a> {
233 fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
234 let attributes: Vec> = Attribute::parse_list(tokens)?;
235 match tokens.clone().next()? {
236 Some((_span: Span, Token::Interface)) => {
237 Interface::parse(tokens, docs, attributes).map(Self::Interface)
238 }
239 Some((_span: Span, Token::World)) => World::parse(tokens, docs, attributes).map(Self::World),
240 Some((_span: Span, Token::Use)) => ToplevelUse::parse(tokens, attributes).map(Self::Use),
241 Some((_span: Span, Token::Package)) => {
242 PackageFile::parse_nested(tokens, docs, attributes).map(Self::Package)
243 }
244 other: Option<(Span, Token)> => Err(err_expected(tokens, expected:"`world`, `interface` or `use`", found:other).into()),
245 }
246 }
247}
248
/// A parsed `namespace:name` package identifier with an optional `@version`.
#[derive(Debug, Clone)]
struct PackageName<'a> {
    docs: Docs<'a>,
    // Span covering `namespace:name[@version]`.
    span: Span,
    namespace: Id<'a>,
    name: Id<'a>,
    // Version plus the span it was parsed from, when present.
    version: Option<(Span, Version)>,
}
257
258impl<'a> PackageName<'a> {
259 fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
260 let namespace = parse_id(tokens)?;
261 tokens.expect(Token::Colon)?;
262 let name = parse_id(tokens)?;
263 let version = parse_opt_version(tokens)?;
264 Ok(PackageName {
265 docs,
266 span: Span {
267 start: namespace.span.start,
268 end: version
269 .as_ref()
270 .map(|(s, _)| s.end)
271 .unwrap_or(name.span.end),
272 },
273 namespace,
274 name,
275 version,
276 })
277 }
278
279 fn package_name(&self) -> crate::PackageName {
280 crate::PackageName {
281 namespace: self.namespace.name.to_string(),
282 name: self.name.name.to_string(),
283 version: self.version.as_ref().map(|(_, v)| v.clone()),
284 }
285 }
286}
287
/// A top-level `use path [as name];` statement.
struct ToplevelUse<'a> {
    span: Span,
    attributes: Vec<Attribute<'a>>,
    item: UsePath<'a>,
    // Optional rename from `as <id>`.
    as_: Option<Id<'a>>,
}
294
295impl<'a> ToplevelUse<'a> {
296 fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
297 let span: Span = tokens.expect(expected:Token::Use)?;
298 let item: UsePath<'_> = UsePath::parse(tokens)?;
299 let as_: Option> = if tokens.eat(expected:Token::As)? {
300 Some(parse_id(tokens)?)
301 } else {
302 None
303 };
304 tokens.expect_semicolon()?;
305 Ok(ToplevelUse {
306 span,
307 attributes,
308 item,
309 as_,
310 })
311 }
312}
313
/// A `world <name> { ... }` definition.
struct World<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    items: Vec<WorldItem<'a>>,
}
320
321impl<'a> World<'a> {
322 fn parse(
323 tokens: &mut Tokenizer<'a>,
324 docs: Docs<'a>,
325 attributes: Vec<Attribute<'a>>,
326 ) -> Result<Self> {
327 tokens.expect(Token::World)?;
328 let name = parse_id(tokens)?;
329 let items = Self::parse_items(tokens)?;
330 Ok(World {
331 docs,
332 attributes,
333 name,
334 items,
335 })
336 }
337
338 fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<WorldItem<'a>>> {
339 tokens.expect(Token::LeftBrace)?;
340 let mut items = Vec::new();
341 loop {
342 let docs = parse_docs(tokens)?;
343 if tokens.eat(Token::RightBrace)? {
344 break;
345 }
346 let attributes = Attribute::parse_list(tokens)?;
347 items.push(WorldItem::parse(tokens, docs, attributes)?);
348 }
349 Ok(items)
350 }
351}
352
/// A single item inside a `world` body.
enum WorldItem<'a> {
    Import(Import<'a>),
    Export(Export<'a>),
    Use(Use<'a>),
    Type(TypeDef<'a>),
    Include(Include<'a>),
}
360
impl<'a> WorldItem<'a> {
    /// Parses a single world item, dispatching on a peeked (not consumed)
    /// token; type-definition keywords all map to `WorldItem::Type`.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<WorldItem<'a>> {
        match tokens.clone().next()? {
            Some((_span, Token::Import)) => {
                Import::parse(tokens, docs, attributes).map(WorldItem::Import)
            }
            Some((_span, Token::Export)) => {
                Export::parse(tokens, docs, attributes).map(WorldItem::Export)
            }
            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(WorldItem::Use),
            Some((_span, Token::Type)) => {
                TypeDef::parse(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Flags)) => {
                TypeDef::parse_flags(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Resource)) => {
                TypeDef::parse_resource(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Record)) => {
                TypeDef::parse_record(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Variant)) => {
                TypeDef::parse_variant(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Enum)) => {
                TypeDef::parse_enum(tokens, docs, attributes).map(WorldItem::Type)
            }
            Some((_span, Token::Include)) => {
                Include::parse(tokens, attributes).map(WorldItem::Include)
            }
            other => Err(err_expected(
                tokens,
                "`import`, `export`, `include`, `use`, or type definition",
                other,
            )
            .into()),
        }
    }
}
405
/// An `import ...;` item inside a world.
struct Import<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    kind: ExternKind<'a>,
}
411
412impl<'a> Import<'a> {
413 fn parse(
414 tokens: &mut Tokenizer<'a>,
415 docs: Docs<'a>,
416 attributes: Vec<Attribute<'a>>,
417 ) -> Result<Import<'a>> {
418 tokens.expect(expected:Token::Import)?;
419 let kind: ExternKind<'_> = ExternKind::parse(tokens)?;
420 Ok(Import {
421 docs,
422 attributes,
423 kind,
424 })
425 }
426}
427
/// An `export ...;` item inside a world.
struct Export<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    kind: ExternKind<'a>,
}
433
434impl<'a> Export<'a> {
435 fn parse(
436 tokens: &mut Tokenizer<'a>,
437 docs: Docs<'a>,
438 attributes: Vec<Attribute<'a>>,
439 ) -> Result<Export<'a>> {
440 tokens.expect(expected:Token::Export)?;
441 let kind: ExternKind<'_> = ExternKind::parse(tokens)?;
442 Ok(Export {
443 docs,
444 attributes,
445 kind,
446 })
447 }
448}
449
/// What an `import`/`export` refers to: an inline interface, a path to a
/// named interface, or a single function.
enum ExternKind<'a> {
    Interface(Id<'a>, Vec<InterfaceItem<'a>>),
    Path(UsePath<'a>),
    Func(Id<'a>, Func<'a>),
}
455
impl<'a> ExternKind<'a> {
    /// Parses the target of an `import`/`export`, using tokenizer clones to
    /// backtrack between the three possible forms.
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<ExternKind<'a>> {
        // Create a copy of the token stream to test out if this is a function
        // or an interface import. In those situations the token stream gets
        // reset to the state of the clone and we continue down those paths.
        //
        // If neither a function nor an interface appears here though then the
        // clone is thrown away and the original token stream is parsed for an
        // interface. This will redo the original ID parse and the original
        // colon parse, but that shouldn't be too too bad perf-wise.
        let mut clone = tokens.clone();
        let id = parse_id(&mut clone)?;
        if clone.eat(Token::Colon)? {
            // import foo: func(...)
            if clone.clone().eat(Token::Func)? {
                *tokens = clone;
                let ret = ExternKind::Func(id, Func::parse(tokens)?);
                tokens.expect_semicolon()?;
                return Ok(ret);
            }

            // import foo: interface { ... }
            if clone.eat(Token::Interface)? {
                *tokens = clone;
                return Ok(ExternKind::Interface(id, Interface::parse_items(tokens)?));
            }
        }

        // import foo
        // import foo/bar
        // import foo:bar/baz
        let ret = ExternKind::Path(UsePath::parse(tokens)?);
        tokens.expect_semicolon()?;
        Ok(ret)
    }

    /// Returns the span of the name/path this extern refers to.
    fn span(&self) -> Span {
        match self {
            ExternKind::Interface(id, _) => id.span,
            ExternKind::Path(UsePath::Id(id)) => id.span,
            ExternKind::Path(UsePath::Package { name, .. }) => name.span,
            ExternKind::Func(id, _) => id.span,
        }
    }
}
501
/// An `interface <name> { ... }` definition.
struct Interface<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    items: Vec<InterfaceItem<'a>>,
}
508
509impl<'a> Interface<'a> {
510 fn parse(
511 tokens: &mut Tokenizer<'a>,
512 docs: Docs<'a>,
513 attributes: Vec<Attribute<'a>>,
514 ) -> Result<Self> {
515 tokens.expect(Token::Interface)?;
516 let name = parse_id(tokens)?;
517 let items = Self::parse_items(tokens)?;
518 Ok(Interface {
519 docs,
520 attributes,
521 name,
522 items,
523 })
524 }
525
526 pub(super) fn parse_items(tokens: &mut Tokenizer<'a>) -> Result<Vec<InterfaceItem<'a>>> {
527 tokens.expect(Token::LeftBrace)?;
528 let mut items = Vec::new();
529 loop {
530 let docs = parse_docs(tokens)?;
531 if tokens.eat(Token::RightBrace)? {
532 break;
533 }
534 let attributes = Attribute::parse_list(tokens)?;
535 items.push(InterfaceItem::parse(tokens, docs, attributes)?);
536 }
537 Ok(items)
538 }
539}
540
/// Whether a path refers to a world, an interface, or is not yet known
/// (disambiguated later by the resolver).
#[derive(Debug)]
pub enum WorldOrInterface {
    World,
    Interface,
    Unknown,
}

/// A single item inside an `interface` body.
enum InterfaceItem<'a> {
    TypeDef(TypeDef<'a>),
    Func(NamedFunc<'a>),
    Use(Use<'a>),
}
553
/// A `use path.{a, b as c};` statement inside a world or interface.
struct Use<'a> {
    attributes: Vec<Attribute<'a>>,
    from: UsePath<'a>,
    names: Vec<UseName<'a>>,
}

/// The path portion of a `use`: either a bare local id or a fully qualified
/// `ns:pkg/name[@version]` package path.
#[derive(Debug)]
enum UsePath<'a> {
    Id(Id<'a>),
    Package { id: PackageName<'a>, name: Id<'a> },
}
565
566impl<'a> UsePath<'a> {
567 fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
568 let id = parse_id(tokens)?;
569 if tokens.eat(Token::Colon)? {
570 // `foo:bar/baz@1.0`
571 let namespace = id;
572 let pkg_name = parse_id(tokens)?;
573 tokens.expect(Token::Slash)?;
574 let name = parse_id(tokens)?;
575 let version = parse_opt_version(tokens)?;
576 Ok(UsePath::Package {
577 id: PackageName {
578 docs: Default::default(),
579 span: Span {
580 start: namespace.span.start,
581 end: pkg_name.span.end,
582 },
583 namespace,
584 name: pkg_name,
585 version,
586 },
587 name,
588 })
589 } else {
590 // `foo`
591 Ok(UsePath::Id(id))
592 }
593 }
594
595 fn name(&self) -> &Id<'a> {
596 match self {
597 UsePath::Id(id) => id,
598 UsePath::Package { name, .. } => name,
599 }
600 }
601}
602
/// One imported name in a `use` list, optionally renamed via `as`.
struct UseName<'a> {
    name: Id<'a>,
    as_: Option<Id<'a>>,
}
607
608impl<'a> Use<'a> {
609 fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
610 tokens.expect(Token::Use)?;
611 let from = UsePath::parse(tokens)?;
612 tokens.expect(Token::Period)?;
613 tokens.expect(Token::LeftBrace)?;
614
615 let mut names = Vec::new();
616 while !tokens.eat(Token::RightBrace)? {
617 let mut name = UseName {
618 name: parse_id(tokens)?,
619 as_: None,
620 };
621 if tokens.eat(Token::As)? {
622 name.as_ = Some(parse_id(tokens)?);
623 }
624 names.push(name);
625 if !tokens.eat(Token::Comma)? {
626 tokens.expect(Token::RightBrace)?;
627 break;
628 }
629 }
630 tokens.expect_semicolon()?;
631 Ok(Use {
632 attributes,
633 from,
634 names,
635 })
636 }
637}
638
/// An `include path [with { a as b, ... }]` item inside a world.
struct Include<'a> {
    from: UsePath<'a>,
    attributes: Vec<Attribute<'a>>,
    names: Vec<IncludeName<'a>>,
}

/// One `name as other` rename entry in an `include ... with { ... }` list.
struct IncludeName<'a> {
    name: Id<'a>,
    as_: Id<'a>,
}
649
impl<'a> Include<'a> {
    /// Parses `include path;` or `include path with { a as b, ... }`.
    fn parse(tokens: &mut Tokenizer<'a>, attributes: Vec<Attribute<'a>>) -> Result<Self> {
        tokens.expect(Token::Include)?;
        let from = UsePath::parse(tokens)?;

        let names = if tokens.eat(Token::With)? {
            // Brace-delimited rename list terminates the statement, so no
            // trailing semicolon is expected in this branch.
            parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |_docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::As)?;
                    let as_ = parse_id(tokens)?;
                    Ok(IncludeName { name, as_ })
                },
            )?
        } else {
            tokens.expect_semicolon()?;
            Vec::new()
        };

        Ok(Include {
            attributes,
            from,
            names,
        })
    }
}
679
/// An identifier plus the source span it was parsed from.
#[derive(Debug, Clone)]
pub struct Id<'a> {
    name: &'a str,
    span: Span,
}

impl<'a> From<&'a str> for Id<'a> {
    fn from(s: &'a str) -> Id<'a> {
        Id {
            name: s.into(),
            // Synthesized ids have no source location, so use a zero span.
            span: Span { start: 0, end: 0 },
        }
    }
}
694
/// Accumulated doc comments preceding an item, plus the span they cover.
#[derive(Debug, Clone)]
pub struct Docs<'a> {
    docs: Vec<Cow<'a, str>>,
    span: Span,
}

impl<'a> Default for Docs<'a> {
    fn default() -> Self {
        Self {
            docs: Default::default(),
            // No comments: an empty zero span.
            span: Span { start: 0, end: 0 },
        }
    }
}
709
/// A named type definition (`type`, `record`, `flags`, `variant`, `enum`,
/// or `resource`).
struct TypeDef<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    ty: Type<'a>,
}
716
/// All type expressions WIT supports; primitive variants carry only their
/// source span, compound variants carry their structure.
enum Type<'a> {
    Bool(Span),
    U8(Span),
    U16(Span),
    U32(Span),
    U64(Span),
    S8(Span),
    S16(Span),
    S32(Span),
    S64(Span),
    F32(Span),
    F64(Span),
    Char(Span),
    String(Span),
    Name(Id<'a>),
    List(List<'a>),
    Handle(Handle<'a>),
    Resource(Resource<'a>),
    Record(Record<'a>),
    Flags(Flags<'a>),
    Variant(Variant<'a>),
    Tuple(Tuple<'a>),
    Enum(Enum<'a>),
    Option(Option_<'a>),
    Result(Result_<'a>),
    Future(Future<'a>),
    Stream(Stream<'a>),
    ErrorContext(Span),
}
746
/// A handle to a resource: owned (`own<r>`) or borrowed (`borrow<r>`).
enum Handle<'a> {
    Own { resource: Id<'a> },
    Borrow { resource: Id<'a> },
}
751
752impl Handle<'_> {
753 fn span(&self) -> Span {
754 match self {
755 Handle::Own { resource: &Id<'_> } | Handle::Borrow { resource: &Id<'_> } => resource.span,
756 }
757 }
758}
759
/// A `resource` type definition and its associated functions.
struct Resource<'a> {
    span: Span,
    funcs: Vec<ResourceFunc<'a>>,
}

/// One function entry inside a resource body.
enum ResourceFunc<'a> {
    Method(NamedFunc<'a>),
    Static(NamedFunc<'a>),
    Constructor(NamedFunc<'a>),
}
770
impl<'a> ResourceFunc<'a> {
    /// Parses one entry of a `resource` body: a constructor, a `static`
    /// function, or a method.
    fn parse(
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
        tokens: &mut Tokenizer<'a>,
    ) -> Result<Self> {
        match tokens.clone().next()? {
            Some((span, Token::Constructor)) => {
                tokens.expect(Token::Constructor)?;
                tokens.expect(Token::LeftParen)?;
                let params = parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok((name, ty))
                })?;
                tokens.expect_semicolon()?;
                // Constructors get a synthesized "constructor" name and an
                // empty result list.
                Ok(ResourceFunc::Constructor(NamedFunc {
                    docs,
                    attributes,
                    name: Id {
                        span,
                        name: "constructor",
                    },
                    func: Func {
                        span,
                        params,
                        results: ResultList::Named(Vec::new()),
                    },
                }))
            }
            Some((_span, Token::Id | Token::ExplicitId)) => {
                let name = parse_id(tokens)?;
                tokens.expect(Token::Colon)?;
                // A `static` keyword after the colon distinguishes statics
                // from methods.
                let ctor = if tokens.eat(Token::Static)? {
                    ResourceFunc::Static
                } else {
                    ResourceFunc::Method
                };
                let func = Func::parse(tokens)?;
                tokens.expect_semicolon()?;
                Ok(ctor(NamedFunc {
                    docs,
                    attributes,
                    name,
                    func,
                }))
            }
            other => Err(err_expected(tokens, "`constructor` or identifier", other).into()),
        }
    }

    /// Returns the underlying named function regardless of variant.
    fn named_func(&self) -> &NamedFunc<'a> {
        use ResourceFunc::*;
        match self {
            Method(f) | Static(f) | Constructor(f) => f,
        }
    }
}
830
/// A `record` type: named, typed fields.
struct Record<'a> {
    span: Span,
    fields: Vec<Field<'a>>,
}

/// One `name: type` field of a record.
struct Field<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Type<'a>,
}

/// A `flags` type: a set of named bits.
struct Flags<'a> {
    span: Span,
    flags: Vec<Flag<'a>>,
}

/// One named flag in a `flags` type.
struct Flag<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}

/// A `variant` type: named cases with optional payloads.
struct Variant<'a> {
    span: Span,
    cases: Vec<Case<'a>>,
}

/// One case of a variant, with an optional payload type.
struct Case<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
    ty: Option<Type<'a>>,
}

/// An `enum` type: named cases without payloads.
struct Enum<'a> {
    span: Span,
    cases: Vec<EnumCase<'a>>,
}

/// One case of an enum.
struct EnumCase<'a> {
    docs: Docs<'a>,
    name: Id<'a>,
}

/// An `option<T>` type.
struct Option_<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

/// A `list<T>` type.
struct List<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

/// A `future` or `future<T>` type; `ty` is `None` for the unit form.
struct Future<'a> {
    span: Span,
    ty: Option<Box<Type<'a>>>,
}

/// A `tuple<T, U, ...>` type.
struct Tuple<'a> {
    span: Span,
    types: Vec<Type<'a>>,
}

/// A `result`, `result<T>`, `result<_, E>`, or `result<T, E>` type.
struct Result_<'a> {
    span: Span,
    ok: Option<Box<Type<'a>>>,
    err: Option<Box<Type<'a>>>,
}

/// A `stream<T>` type.
struct Stream<'a> {
    span: Span,
    ty: Box<Type<'a>>,
}

/// A function together with its declared name.
struct NamedFunc<'a> {
    docs: Docs<'a>,
    attributes: Vec<Attribute<'a>>,
    name: Id<'a>,
    func: Func<'a>,
}

/// A parameter list: `(name, type)` pairs in declaration order.
type ParamList<'a> = Vec<(Id<'a>, Type<'a>)>;

/// Function results: either a named list `-> (a: t, ...)` or a single
/// anonymous type `-> t`.
enum ResultList<'a> {
    Named(ParamList<'a>),
    Anon(Type<'a>),
}

/// A function signature: `func(params) [-> results]`.
struct Func<'a> {
    span: Span,
    params: ParamList<'a>,
    results: ResultList<'a>,
}
923
impl<'a> Func<'a> {
    /// Parses `func(params) [-> result]` starting at the `func` keyword.
    fn parse(tokens: &mut Tokenizer<'a>) -> Result<Func<'a>> {
        // Shared parser for parenthesized `name: type` lists; `left_paren`
        // controls whether the opening `(` still needs to be consumed.
        fn parse_params<'a>(tokens: &mut Tokenizer<'a>, left_paren: bool) -> Result<ParamList<'a>> {
            if left_paren {
                tokens.expect(Token::LeftParen)?;
            };
            parse_list_trailer(tokens, Token::RightParen, |_docs, tokens| {
                let name = parse_id(tokens)?;
                tokens.expect(Token::Colon)?;
                let ty = Type::parse(tokens)?;
                Ok((name, ty))
            })
        }

        let span = tokens.expect(Token::Func)?;
        let params = parse_params(tokens, true)?;
        let results = if tokens.eat(Token::RArrow)? {
            // If we eat a '(', parse the remainder of the named
            // result types. Otherwise parse a single anonymous type.
            if tokens.eat(Token::LeftParen)? {
                let results = parse_params(tokens, false)?;
                ResultList::Named(results)
            } else {
                let ty = Type::parse(tokens)?;
                ResultList::Anon(ty)
            }
        } else {
            // No arrow: the function returns nothing.
            ResultList::Named(Vec::new())
        };
        Ok(Func {
            span,
            params,
            results,
        })
    }
}
960
impl<'a> InterfaceItem<'a> {
    /// Parses one interface item, dispatching on a peeked (not consumed)
    /// token; an identifier begins a named function declaration.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<InterfaceItem<'a>> {
        match tokens.clone().next()? {
            Some((_span, Token::Type)) => {
                TypeDef::parse(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Flags)) => {
                TypeDef::parse_flags(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Enum)) => {
                TypeDef::parse_enum(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Variant)) => {
                TypeDef::parse_variant(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Resource)) => {
                TypeDef::parse_resource(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Record)) => {
                TypeDef::parse_record(tokens, docs, attributes).map(InterfaceItem::TypeDef)
            }
            Some((_span, Token::Id)) | Some((_span, Token::ExplicitId)) => {
                NamedFunc::parse(tokens, docs, attributes).map(InterfaceItem::Func)
            }
            Some((_span, Token::Use)) => Use::parse(tokens, attributes).map(InterfaceItem::Use),
            other => Err(err_expected(tokens, "`type`, `resource` or `func`", other).into()),
        }
    }
}
994
impl<'a> TypeDef<'a> {
    /// Parses a `type <name> = <ty>;` alias.
    fn parse(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Type)?;
        let name = parse_id(tokens)?;
        tokens.expect(Token::Equals)?;
        let ty = Type::parse(tokens)?;
        tokens.expect_semicolon()?;
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a `flags <name> { ... }` definition.
    fn parse_flags(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Flags)?;
        let name = parse_id(tokens)?;
        let ty = Type::Flags(Flags {
            span: name.span,
            flags: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(Flag { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a `resource <name> [{ ... }]` definition; a bodyless resource
    /// is terminated by a semicolon instead of braces.
    fn parse_resource(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Resource)?;
        let name = parse_id(tokens)?;
        let mut funcs = Vec::new();
        if tokens.eat(Token::LeftBrace)? {
            while !tokens.eat(Token::RightBrace)? {
                let docs = parse_docs(tokens)?;
                let attributes = Attribute::parse_list(tokens)?;
                funcs.push(ResourceFunc::parse(docs, attributes, tokens)?);
            }
        } else {
            tokens.expect_semicolon()?;
        }
        let ty = Type::Resource(Resource {
            span: name.span,
            funcs,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a `record <name> { ... }` definition.
    fn parse_record(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Record)?;
        let name = parse_id(tokens)?;
        let ty = Type::Record(Record {
            span: name.span,
            fields: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    tokens.expect(Token::Colon)?;
                    let ty = Type::parse(tokens)?;
                    Ok(Field { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses a `variant <name> { ... }` definition; each case may carry an
    /// optional parenthesized payload type.
    fn parse_variant(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Variant)?;
        let name = parse_id(tokens)?;
        let ty = Type::Variant(Variant {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    let ty = if tokens.eat(Token::LeftParen)? {
                        let ty = Type::parse(tokens)?;
                        tokens.expect(Token::RightParen)?;
                        Some(ty)
                    } else {
                        None
                    };
                    Ok(Case { docs, name, ty })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }

    /// Parses an `enum <name> { ... }` definition.
    fn parse_enum(
        tokens: &mut Tokenizer<'a>,
        docs: Docs<'a>,
        attributes: Vec<Attribute<'a>>,
    ) -> Result<Self> {
        tokens.expect(Token::Enum)?;
        let name = parse_id(tokens)?;
        let ty = Type::Enum(Enum {
            span: name.span,
            cases: parse_list(
                tokens,
                Token::LeftBrace,
                Token::RightBrace,
                |docs, tokens| {
                    let name = parse_id(tokens)?;
                    Ok(EnumCase { docs, name })
                },
            )?,
        });
        Ok(TypeDef {
            docs,
            attributes,
            name,
            ty,
        })
    }
}
1160
1161impl<'a> NamedFunc<'a> {
1162 fn parse(
1163 tokens: &mut Tokenizer<'a>,
1164 docs: Docs<'a>,
1165 attributes: Vec<Attribute<'a>>,
1166 ) -> Result<Self> {
1167 let name: Id<'_> = parse_id(tokens)?;
1168 tokens.expect(expected:Token::Colon)?;
1169 let func: Func<'_> = Func::parse(tokens)?;
1170 tokens.expect_semicolon()?;
1171 Ok(NamedFunc {
1172 docs,
1173 attributes,
1174 name,
1175 func,
1176 })
1177 }
1178}
1179
1180fn parse_id<'a>(tokens: &mut Tokenizer<'a>) -> Result<Id<'a>> {
1181 match tokens.next()? {
1182 Some((span: Span, Token::Id)) => Ok(Id {
1183 name: tokens.parse_id(span)?,
1184 span,
1185 }),
1186 Some((span: Span, Token::ExplicitId)) => Ok(Id {
1187 name: tokens.parse_explicit_id(span)?,
1188 span,
1189 }),
1190 other: Option<(Span, Token)> => Err(err_expected(tokens, expected:"an identifier or string", found:other).into()),
1191 }
1192}
1193
1194fn parse_opt_version(tokens: &mut Tokenizer<'_>) -> Result<Option<(Span, Version)>> {
1195 if tokens.eat(expected:Token::At)? {
1196 parse_version(tokens).map(op:Some)
1197 } else {
1198 Ok(None)
1199 }
1200}
1201
/// Parses a semver `major.minor.patch[-pre][+build]` version, returning the
/// source span of the whole version alongside the parsed `Version`.
fn parse_version(tokens: &mut Tokenizer<'_>) -> Result<(Span, Version)> {
    let start = tokens.expect(Token::Integer)?.start;
    tokens.expect(Token::Period)?;
    tokens.expect(Token::Integer)?;
    tokens.expect(Token::Period)?;
    let end = tokens.expect(Token::Integer)?.end;
    let mut span = Span { start, end };
    // Chomp optional pre-release (`-...`) then build metadata (`+...`),
    // extending `span` to cover them.
    eat_ids(tokens, Token::Minus, &mut span)?;
    eat_ids(tokens, Token::Plus, &mut span)?;
    // The raw source text of the span is handed to the `semver` crate for
    // the actual validation/parsing.
    let string = tokens.get_span(span);
    let version = Version::parse(string).map_err(|e| Error::new(span, e.to_string()))?;
    return Ok((span, version));

    // According to `semver.org` this is what we're parsing:
    //
    // ```ebnf
    // <pre-release> ::= <dot-separated pre-release identifiers>
    //
    // <dot-separated pre-release identifiers> ::= <pre-release identifier>
    //                                           | <pre-release identifier> "." <dot-separated pre-release identifiers>
    //
    // <build> ::= <dot-separated build identifiers>
    //
    // <dot-separated build identifiers> ::= <build identifier>
    //                                     | <build identifier> "." <dot-separated build identifiers>
    //
    // <pre-release identifier> ::= <alphanumeric identifier>
    //                            | <numeric identifier>
    //
    // <build identifier> ::= <alphanumeric identifier>
    //                      | <digits>
    //
    // <alphanumeric identifier> ::= <non-digit>
    //                             | <non-digit> <identifier characters>
    //                             | <identifier characters> <non-digit>
    //                             | <identifier characters> <non-digit> <identifier characters>
    //
    // <numeric identifier> ::= "0"
    //                        | <positive digit>
    //                        | <positive digit> <digits>
    //
    // <identifier characters> ::= <identifier character>
    //                           | <identifier character> <identifier characters>
    //
    // <identifier character> ::= <digit>
    //                          | <non-digit>
    //
    // <non-digit> ::= <letter>
    //               | "-"
    //
    // <digits> ::= <digit>
    //            | <digit> <digits>
    // ```
    //
    // This is loosely based on WIT syntax and an approximation is parsed here:
    //
    // * This function starts by parsing the optional leading `-` and `+` which
    //   indicates pre-release and build metadata.
    // * Afterwards all of $id, $integer, `-`, and `.` are chomped. The only
    //   exception here is that if `.` isn't followed by $id, $integer, or `-`
    //   then it's assumed that it's something like `use a:b@1.0.0-a.{...}`
    //   where the `.` is part of WIT syntax, not semver.
    //
    // Note that this additionally doesn't try to return any first-class errors.
    // Instead this bails out on something unrecognized for something else in
    // the system to return an error.
    fn eat_ids(tokens: &mut Tokenizer<'_>, prefix: Token, end: &mut Span) -> Result<()> {
        if !tokens.eat(prefix)? {
            return Ok(());
        }
        loop {
            // Peek via a clone; only commit the clone back into `tokens`
            // when the peeked token is part of the version.
            let mut clone = tokens.clone();
            match clone.next()? {
                Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                    end.end = span.end;
                    *tokens = clone;
                }
                Some((_span, Token::Period)) => match clone.next()? {
                    Some((span, Token::Id | Token::Integer | Token::Minus)) => {
                        end.end = span.end;
                        *tokens = clone;
                    }
                    _ => break Ok(()),
                },
                _ => break Ok(()),
            }
        }
    }
}
1291
/// Collects the run of comment tokens preceding the next significant token
/// into a `Docs`, advancing `tokens` past them (whitespace is skipped).
fn parse_docs<'a>(tokens: &mut Tokenizer<'a>) -> Result<Docs<'a>> {
    let mut docs = Docs::default();
    let mut clone = tokens.clone();
    let mut started = false;
    while let Some((span, token)) = clone.next_raw()? {
        match token {
            Token::Whitespace => {}
            Token::Comment => {
                let comment = tokens.get_span(span);
                if !started {
                    docs.span.start = span.start;
                    started = true;
                }
                // Exclude the comment's trailing whitespace from the
                // recorded span.
                let trailing_ws = comment
                    .bytes()
                    .rev()
                    .take_while(|ch| ch.is_ascii_whitespace())
                    .count();
                docs.span.end = span.end - (trailing_ws as u32);
                docs.docs.push(comment.into());
            }
            // First non-comment, non-whitespace token: stop without
            // consuming it.
            _ => break,
        };
        *tokens = clone.clone();
    }
    Ok(docs)
}
1319
1320impl<'a> Type<'a> {
1321 fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
1322 match tokens.next()? {
1323 Some((span, Token::U8)) => Ok(Type::U8(span)),
1324 Some((span, Token::U16)) => Ok(Type::U16(span)),
1325 Some((span, Token::U32)) => Ok(Type::U32(span)),
1326 Some((span, Token::U64)) => Ok(Type::U64(span)),
1327 Some((span, Token::S8)) => Ok(Type::S8(span)),
1328 Some((span, Token::S16)) => Ok(Type::S16(span)),
1329 Some((span, Token::S32)) => Ok(Type::S32(span)),
1330 Some((span, Token::S64)) => Ok(Type::S64(span)),
1331 Some((span, Token::F32)) => Ok(Type::F32(span)),
1332 Some((span, Token::F64)) => Ok(Type::F64(span)),
1333 Some((span, Token::Char)) => Ok(Type::Char(span)),
1334
1335 // tuple<T, U, ...>
1336 Some((span, Token::Tuple)) => {
1337 let types = parse_list(
1338 tokens,
1339 Token::LessThan,
1340 Token::GreaterThan,
1341 |_docs, tokens| Type::parse(tokens),
1342 )?;
1343 Ok(Type::Tuple(Tuple { span, types }))
1344 }
1345
1346 Some((span, Token::Bool)) => Ok(Type::Bool(span)),
1347 Some((span, Token::String_)) => Ok(Type::String(span)),
1348
1349 // list<T>
1350 Some((span, Token::List)) => {
1351 tokens.expect(Token::LessThan)?;
1352 let ty = Type::parse(tokens)?;
1353 tokens.expect(Token::GreaterThan)?;
1354 Ok(Type::List(List {
1355 span,
1356 ty: Box::new(ty),
1357 }))
1358 }
1359
1360 // option<T>
1361 Some((span, Token::Option_)) => {
1362 tokens.expect(Token::LessThan)?;
1363 let ty = Type::parse(tokens)?;
1364 tokens.expect(Token::GreaterThan)?;
1365 Ok(Type::Option(Option_ {
1366 span,
1367 ty: Box::new(ty),
1368 }))
1369 }
1370
1371 // result<T, E>
1372 // result<_, E>
1373 // result<T>
1374 // result
1375 Some((span, Token::Result_)) => {
1376 let mut ok = None;
1377 let mut err = None;
1378
1379 if tokens.eat(Token::LessThan)? {
1380 if tokens.eat(Token::Underscore)? {
1381 tokens.expect(Token::Comma)?;
1382 err = Some(Box::new(Type::parse(tokens)?));
1383 } else {
1384 ok = Some(Box::new(Type::parse(tokens)?));
1385 if tokens.eat(Token::Comma)? {
1386 err = Some(Box::new(Type::parse(tokens)?));
1387 }
1388 };
1389 tokens.expect(Token::GreaterThan)?;
1390 };
1391 Ok(Type::Result(Result_ { span, ok, err }))
1392 }
1393
1394 // future<T>
1395 // future
1396 Some((span, Token::Future)) => {
1397 let mut ty = None;
1398
1399 if tokens.eat(Token::LessThan)? {
1400 ty = Some(Box::new(Type::parse(tokens)?));
1401 tokens.expect(Token::GreaterThan)?;
1402 };
1403 Ok(Type::Future(Future { span, ty }))
1404 }
1405
1406 // stream<T>
1407 Some((span, Token::Stream)) => {
1408 tokens.expect(Token::LessThan)?;
1409 let ty = Type::parse(tokens)?;
1410 tokens.expect(Token::GreaterThan)?;
1411 Ok(Type::Stream(Stream {
1412 span,
1413 ty: Box::new(ty),
1414 }))
1415 }
1416
1417 // error-context
1418 Some((span, Token::ErrorContext)) => Ok(Type::ErrorContext(span)),
1419
1420 // own<T>
1421 Some((_span, Token::Own)) => {
1422 tokens.expect(Token::LessThan)?;
1423 let resource = parse_id(tokens)?;
1424 tokens.expect(Token::GreaterThan)?;
1425 Ok(Type::Handle(Handle::Own { resource }))
1426 }
1427
1428 // borrow<T>
1429 Some((_span, Token::Borrow)) => {
1430 tokens.expect(Token::LessThan)?;
1431 let resource = parse_id(tokens)?;
1432 tokens.expect(Token::GreaterThan)?;
1433 Ok(Type::Handle(Handle::Borrow { resource }))
1434 }
1435
1436 // `foo`
1437 Some((span, Token::Id)) => Ok(Type::Name(Id {
1438 name: tokens.parse_id(span)?.into(),
1439 span,
1440 })),
1441 // `%foo`
1442 Some((span, Token::ExplicitId)) => Ok(Type::Name(Id {
1443 name: tokens.parse_explicit_id(span)?.into(),
1444 span,
1445 })),
1446
1447 other => Err(err_expected(tokens, "a type", other).into()),
1448 }
1449 }
1450
1451 fn span(&self) -> Span {
1452 match self {
1453 Type::Bool(span)
1454 | Type::U8(span)
1455 | Type::U16(span)
1456 | Type::U32(span)
1457 | Type::U64(span)
1458 | Type::S8(span)
1459 | Type::S16(span)
1460 | Type::S32(span)
1461 | Type::S64(span)
1462 | Type::F32(span)
1463 | Type::F64(span)
1464 | Type::Char(span)
1465 | Type::String(span)
1466 | Type::ErrorContext(span) => *span,
1467 Type::Name(id) => id.span,
1468 Type::List(l) => l.span,
1469 Type::Handle(h) => h.span(),
1470 Type::Resource(r) => r.span,
1471 Type::Record(r) => r.span,
1472 Type::Flags(f) => f.span,
1473 Type::Variant(v) => v.span,
1474 Type::Tuple(t) => t.span,
1475 Type::Enum(e) => e.span,
1476 Type::Option(o) => o.span,
1477 Type::Result(r) => r.span,
1478 Type::Future(f) => f.span,
1479 Type::Stream(s) => s.span,
1480 }
1481 }
1482}
1483
1484fn parse_list<'a, T>(
1485 tokens: &mut Tokenizer<'a>,
1486 start: Token,
1487 end: Token,
1488 parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
1489) -> Result<Vec<T>> {
1490 tokens.expect(expected:start)?;
1491 parse_list_trailer(tokens, end, parse)
1492}
1493
1494fn parse_list_trailer<'a, T>(
1495 tokens: &mut Tokenizer<'a>,
1496 end: Token,
1497 mut parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
1498) -> Result<Vec<T>> {
1499 let mut items: Vec = Vec::new();
1500 loop {
1501 // get docs before we skip them to try to eat the end token
1502 let docs: Docs<'_> = parse_docs(tokens)?;
1503
1504 // if we found an end token then we're done
1505 if tokens.eat(expected:end)? {
1506 break;
1507 }
1508
1509 let item: T = parse(docs, tokens)?;
1510 items.push(item);
1511
1512 // if there's no trailing comma then this is required to be the end,
1513 // otherwise we go through the loop to try to get another item
1514 if !tokens.eat(expected:Token::Comma)? {
1515 tokens.expect(expected:end)?;
1516 break;
1517 }
1518 }
1519 Ok(items)
1520}
1521
1522fn err_expected(
1523 tokens: &Tokenizer<'_>,
1524 expected: &'static str,
1525 found: Option<(Span, Token)>,
1526) -> Error {
1527 match found {
1528 Some((span: Span, token: Token)) => Error::new(
1529 span,
1530 msg:format!("expected {}, found {}", expected, token.describe()),
1531 ),
1532 None => Error::new(
1533 tokens.eof_span(),
1534 msg:format!("expected {}, found eof", expected),
1535 ),
1536 }
1537}
1538
/// A parsed `@...` attribute (see [`Attribute::parse_list`] for syntax).
enum Attribute<'a> {
    /// `@since(version = <semver>)`
    Since { span: Span, version: Version },
    /// `@unstable(feature = <id>)`
    Unstable { span: Span, feature: Id<'a> },
    /// `@deprecated(version = <semver>)`
    Deprecated { span: Span, version: Version },
}
1544
1545impl<'a> Attribute<'a> {
1546 fn parse_list(tokens: &mut Tokenizer<'a>) -> Result<Vec<Attribute<'a>>> {
1547 let mut ret = Vec::new();
1548 while tokens.eat(Token::At)? {
1549 let id = parse_id(tokens)?;
1550 let attr = match id.name {
1551 "since" => {
1552 tokens.expect(Token::LeftParen)?;
1553 eat_id(tokens, "version")?;
1554 tokens.expect(Token::Equals)?;
1555 let (_span, version) = parse_version(tokens)?;
1556 tokens.expect(Token::RightParen)?;
1557 Attribute::Since {
1558 span: id.span,
1559 version,
1560 }
1561 }
1562 "unstable" => {
1563 tokens.expect(Token::LeftParen)?;
1564 eat_id(tokens, "feature")?;
1565 tokens.expect(Token::Equals)?;
1566 let feature = parse_id(tokens)?;
1567 tokens.expect(Token::RightParen)?;
1568 Attribute::Unstable {
1569 span: id.span,
1570 feature,
1571 }
1572 }
1573 "deprecated" => {
1574 tokens.expect(Token::LeftParen)?;
1575 eat_id(tokens, "version")?;
1576 tokens.expect(Token::Equals)?;
1577 let (_span, version) = parse_version(tokens)?;
1578 tokens.expect(Token::RightParen)?;
1579 Attribute::Deprecated {
1580 span: id.span,
1581 version,
1582 }
1583 }
1584 other => {
1585 bail!(Error::new(id.span, format!("unknown attribute `{other}`"),))
1586 }
1587 };
1588 ret.push(attr);
1589 }
1590 Ok(ret)
1591 }
1592
1593 fn span(&self) -> Span {
1594 match self {
1595 Attribute::Since { span, .. }
1596 | Attribute::Unstable { span, .. }
1597 | Attribute::Deprecated { span, .. } => *span,
1598 }
1599 }
1600}
1601
1602fn eat_id(tokens: &mut Tokenizer<'_>, expected: &str) -> Result<Span> {
1603 let id: Id<'_> = parse_id(tokens)?;
1604 if id.name != expected {
1605 bail!(Error::new(
1606 id.span,
1607 format!("expected `{expected}`, found `{}`", id.name),
1608 ));
1609 }
1610 Ok(id.span)
1611}
1612
/// A listing of source files which are used to get parsed into an
/// [`UnresolvedPackage`].
#[derive(Clone, Default)]
pub struct SourceMap {
    // All sources pushed so far, in push order; their byte ranges are
    // contiguous within the map.
    sources: Vec<Source>,
    // Byte offset at which the next pushed source will start (the total
    // length of all prior sources, including their appended `\n`).
    offset: u32,
    // Forwarded to `Tokenizer::new` when parsing; `None` uses the default.
    require_f32_f64: Option<bool>,
}
1621
/// A single file's contents registered with a [`SourceMap`].
#[derive(Clone)]
struct Source {
    // Map-global byte offset at which this source's contents begin.
    offset: u32,
    // Path used purely for error messages; not re-read from disk.
    path: PathBuf,
    // File contents, with a single `\n` appended by `SourceMap::push`.
    contents: String,
}
1628
1629impl SourceMap {
1630 /// Creates a new empty source map.
1631 pub fn new() -> SourceMap {
1632 SourceMap::default()
1633 }
1634
1635 #[doc(hidden)] // NB: only here for a transitionary period
1636 pub fn set_require_f32_f64(&mut self, enable: bool) {
1637 self.require_f32_f64 = Some(enable);
1638 }
1639
1640 /// Reads the file `path` on the filesystem and appends its contents to this
1641 /// [`SourceMap`].
1642 pub fn push_file(&mut self, path: &Path) -> Result<()> {
1643 let contents = std::fs::read_to_string(path)
1644 .with_context(|| format!("failed to read file {path:?}"))?;
1645 self.push(path, contents);
1646 Ok(())
1647 }
1648
1649 /// Appends the given contents with the given path into this source map.
1650 ///
1651 /// The `path` provided is not read from the filesystem and is instead only
1652 /// used during error messages. Each file added to a [`SourceMap`] is
1653 /// used to create the final parsed package namely by unioning all the
1654 /// interfaces and worlds defined together. Note that each file has its own
1655 /// personal namespace, however, for top-level `use` and such.
1656 pub fn push(&mut self, path: &Path, contents: impl Into<String>) {
1657 let mut contents = contents.into();
1658 // Guarantee that there's at least one character in these contents by
1659 // appending a single newline to the end. This is excluded from
1660 // tokenization below so it's only here to ensure that spans which point
1661 // one byte beyond the end of a file (eof) point to the same original
1662 // file.
1663 contents.push('\n');
1664 let new_offset = self.offset + u32::try_from(contents.len()).unwrap();
1665 self.sources.push(Source {
1666 offset: self.offset,
1667 path: path.to_path_buf(),
1668 contents,
1669 });
1670 self.offset = new_offset;
1671 }
1672
1673 /// Parses the files added to this source map into a
1674 /// [`UnresolvedPackageGroup`].
1675 pub fn parse(self) -> Result<UnresolvedPackageGroup> {
1676 let mut nested = Vec::new();
1677 let main = self.rewrite_error(|| {
1678 let mut resolver = Resolver::default();
1679 let mut srcs = self.sources.iter().collect::<Vec<_>>();
1680 srcs.sort_by_key(|src| &src.path);
1681
1682 // Parse each source file individually. A tokenizer is created here
1683 // form settings and then `PackageFile` is used to parse the whole
1684 // stream of tokens.
1685 for src in srcs {
1686 let mut tokens = Tokenizer::new(
1687 // chop off the forcibly appended `\n` character when
1688 // passing through the source to get tokenized.
1689 &src.contents[..src.contents.len() - 1],
1690 src.offset,
1691 self.require_f32_f64,
1692 )
1693 .with_context(|| format!("failed to tokenize path: {}", src.path.display()))?;
1694 let mut file = PackageFile::parse(&mut tokens)?;
1695
1696 // Filter out any nested packages and resolve them separately.
1697 // Nested packages have only a single "file" so only one item
1698 // is pushed into a `Resolver`. Note that a nested `Resolver`
1699 // is used here, not the outer one.
1700 //
1701 // Note that filtering out `Package` items is required due to
1702 // how the implementation of disallowing nested packages in
1703 // nested packages currently works.
1704 for item in mem::take(&mut file.decl_list.items) {
1705 match item {
1706 AstItem::Package(nested_pkg) => {
1707 let mut resolve = Resolver::default();
1708 resolve.push(nested_pkg).with_context(|| {
1709 format!(
1710 "failed to handle nested package in: {}",
1711 src.path.display()
1712 )
1713 })?;
1714
1715 nested.push(resolve.resolve()?);
1716 }
1717 other => file.decl_list.items.push(other),
1718 }
1719 }
1720
1721 // With nested packages handled push this file into the
1722 // resolver.
1723 resolver.push(file).with_context(|| {
1724 format!("failed to start resolving path: {}", src.path.display())
1725 })?;
1726 }
1727 Ok(resolver.resolve()?)
1728 })?;
1729 Ok(UnresolvedPackageGroup {
1730 main,
1731 nested,
1732 source_map: self,
1733 })
1734 }
1735
1736 pub(crate) fn rewrite_error<F, T>(&self, f: F) -> Result<T>
1737 where
1738 F: FnOnce() -> Result<T>,
1739 {
1740 let mut err = match f() {
1741 Ok(t) => return Ok(t),
1742 Err(e) => e,
1743 };
1744 if let Some(parse) = err.downcast_mut::<Error>() {
1745 if parse.highlighted.is_none() {
1746 let msg = self.highlight_err(parse.span.start, Some(parse.span.end), &parse.msg);
1747 parse.highlighted = Some(msg);
1748 }
1749 }
1750 if let Some(_) = err.downcast_mut::<Error>() {
1751 return Err(err);
1752 }
1753 if let Some(notfound) = err.downcast_mut::<PackageNotFoundError>() {
1754 if notfound.highlighted.is_none() {
1755 let msg = self.highlight_err(
1756 notfound.span.start,
1757 Some(notfound.span.end),
1758 &format!("{notfound}"),
1759 );
1760 notfound.highlighted = Some(msg);
1761 }
1762 }
1763 if let Some(_) = err.downcast_mut::<PackageNotFoundError>() {
1764 return Err(err);
1765 }
1766
1767 if let Some(lex) = err.downcast_ref::<lex::Error>() {
1768 let pos = match lex {
1769 lex::Error::Unexpected(at, _)
1770 | lex::Error::UnterminatedComment(at)
1771 | lex::Error::Wanted { at, .. }
1772 | lex::Error::InvalidCharInId(at, _)
1773 | lex::Error::IdPartEmpty(at)
1774 | lex::Error::InvalidEscape(at, _) => *at,
1775 };
1776 let msg = self.highlight_err(pos, None, lex);
1777 bail!("{msg}")
1778 }
1779
1780 if let Some(sort) = err.downcast_mut::<toposort::Error>() {
1781 if sort.highlighted().is_none() {
1782 let span = match sort {
1783 toposort::Error::NonexistentDep { span, .. }
1784 | toposort::Error::Cycle { span, .. } => *span,
1785 };
1786 let highlighted = self.highlight_err(span.start, Some(span.end), &sort);
1787 sort.set_highlighted(highlighted);
1788 }
1789 }
1790
1791 Err(err)
1792 }
1793
1794 fn highlight_err(&self, start: u32, end: Option<u32>, err: impl fmt::Display) -> String {
1795 let src = self.source_for_offset(start);
1796 let start = src.to_relative_offset(start);
1797 let end = end.map(|end| src.to_relative_offset(end));
1798 let (line, col) = src.linecol(start);
1799 let snippet = src.contents.lines().nth(line).unwrap_or("");
1800 let mut msg = format!(
1801 "\
1802{err}
1803 --> {file}:{line}:{col}
1804 |
1805 {line:4} | {snippet}
1806 | {marker:>0$}",
1807 col + 1,
1808 file = src.path.display(),
1809 line = line + 1,
1810 col = col + 1,
1811 marker = "^",
1812 );
1813 if let Some(end) = end {
1814 if let Some(s) = src.contents.get(start..end) {
1815 for _ in s.chars().skip(1) {
1816 msg.push('-');
1817 }
1818 }
1819 }
1820 return msg;
1821 }
1822
1823 pub(crate) fn render_location(&self, span: Span) -> String {
1824 let src = self.source_for_offset(span.start);
1825 let start = src.to_relative_offset(span.start);
1826 let (line, col) = src.linecol(start);
1827 format!(
1828 "{file}:{line}:{col}",
1829 file = src.path.display(),
1830 line = line + 1,
1831 col = col + 1,
1832 )
1833 }
1834
1835 fn source_for_offset(&self, start: u32) -> &Source {
1836 let i = match self.sources.binary_search_by_key(&start, |src| src.offset) {
1837 Ok(i) => i,
1838 Err(i) => i - 1,
1839 };
1840 &self.sources[i]
1841 }
1842
1843 /// Returns an iterator over all filenames added to this source map.
1844 pub fn source_files(&self) -> impl Iterator<Item = &Path> {
1845 self.sources.iter().map(|src| src.path.as_path())
1846 }
1847}
1848
1849impl Source {
1850 fn to_relative_offset(&self, offset: u32) -> usize {
1851 usize::try_from(offset - self.offset).unwrap()
1852 }
1853
1854 fn linecol(&self, relative_offset: usize) -> (usize, usize) {
1855 let mut cur: usize = 0;
1856 // Use split_terminator instead of lines so that if there is a `\r`,
1857 // it is included in the offset calculation. The `+1` values below
1858 // account for the `\n`.
1859 for (i: usize, line: &str) in self.contents.split_terminator('\n').enumerate() {
1860 if cur + line.len() + 1 > relative_offset {
1861 return (i, relative_offset - cur);
1862 }
1863 cur += line.len() + 1;
1864 }
1865 (self.contents.lines().count(), 0)
1866 }
1867}
1868
/// The result of [`parse_use_path`]: either a bare name or a
/// package-qualified path.
pub enum ParsedUsePath {
    /// A bare identifier naming an item directly.
    Name(String),
    /// A package-qualified path: the package name plus the item name within
    /// that package.
    Package(crate::PackageName, String),
}
1873
1874pub fn parse_use_path(s: &str) -> Result<ParsedUsePath> {
1875 let mut tokens: Tokenizer<'_> = Tokenizer::new(input:s, span_offset:0, require_f32_f64:None)?;
1876 let path: UsePath<'_> = UsePath::parse(&mut tokens)?;
1877 if tokens.next()?.is_some() {
1878 bail!("trailing tokens in path specifier");
1879 }
1880 Ok(match path {
1881 UsePath::Id(id: Id<'_>) => ParsedUsePath::Name(id.name.to_string()),
1882 UsePath::Package { id: PackageName<'_>, name: Id<'_> } => {
1883 ParsedUsePath::Package(id.package_name(), name.name.to_string())
1884 }
1885 })
1886}
1887