use crate::scanner::*;
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Debug, Eq)]
enum State {
    StreamStart,
    ImplicitDocumentStart,
    DocumentStart,
    DocumentContent,
    DocumentEnd,
    BlockNode,
    // BlockNodeOrIndentlessSequence,
    // FlowNode,
    BlockSequenceFirstEntry,
    BlockSequenceEntry,
    IndentlessSequenceEntry,
    BlockMappingFirstKey,
    BlockMappingKey,
    BlockMappingValue,
    FlowSequenceFirstEntry,
    FlowSequenceEntry,
    FlowSequenceEntryMappingKey,
    FlowSequenceEntryMappingValue,
    FlowSequenceEntryMappingEnd,
    FlowMappingFirstKey,
    FlowMappingKey,
    FlowMappingValue,
    FlowMappingEmptyValue,
    End,
}

/// `Event` is used with the low-level event-based parsing API;
/// see the `EventReceiver` trait.
#[derive(Clone, PartialEq, Debug, Eq)]
pub enum Event {
    /// Reserved for internal use
    Nothing,
    StreamStart,
    StreamEnd,
    DocumentStart,
    DocumentEnd,
    /// Refer to an anchor ID
    Alias(usize),
    /// Value, style, anchor_id, tag
    Scalar(String, TScalarStyle, usize, Option<TokenType>),
    /// Anchor ID
    SequenceStart(usize),
    SequenceEnd,
    /// Anchor ID
    MappingStart(usize),
    MappingEnd,
}

impl Event {
    fn empty_scalar() -> Event {
        // a null scalar
        Event::Scalar("~".to_owned(), TScalarStyle::Plain, 0, None)
    }

    fn empty_scalar_with_anchor(anchor: usize, tag: Option<TokenType>) -> Event {
        Event::Scalar("".to_owned(), TScalarStyle::Plain, anchor, tag)
    }
}

#[derive(Debug)]
pub struct Parser<T> {
    scanner: Scanner<T>,
    states: Vec<State>,
    state: State,
    marks: Vec<Marker>,
    token: Option<Token>,
    current: Option<(Event, Marker)>,
    anchors: HashMap<String, usize>,
    anchor_id: usize,
}

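/// Receives events emitted by the low-level parsing API. Implement
/// `on_event` to handle each `Event`; positions are not reported (see
/// `MarkedEventReceiver` if you need them).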
pub trait EventReceiver {
    fn on_event(&mut self, ev: Event);
}

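/// Like `EventReceiver`, but each event is delivered together with the
/// `Marker` at which it was produced; the blanket implementation below
/// forwards plain `EventReceiver`s.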
pub trait MarkedEventReceiver {
    fn on_event(&mut self, ev: Event, _mark: Marker);
}

impl<R: EventReceiver> MarkedEventReceiver for R {
    fn on_event(&mut self, ev: Event, _mark: Marker) {
        self.on_event(ev)
    }
}

pub type ParseResult = Result<(Event, Marker), ScanError>;

impl<T: Iterator<Item = char>> Parser<T> {
    pub fn new(src: T) -> Parser<T> {
        Parser {
            scanner: Scanner::new(src),
            states: Vec::new(),
            state: State::StreamStart,
            marks: Vec::new(),
            token: None,
            current: None,

            anchors: HashMap::new(),
            // valid anchor_id starts from 1
            anchor_id: 1,
        }
    }

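    /// Return a reference to the next event without consuming it, parsing
    /// ahead if necessary.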
    pub fn peek(&mut self) -> Result<&(Event, Marker), ScanError> {
        match self.current {
            Some(ref x) => Ok(x),
            None => {
                self.current = Some(self.next()?);
                self.peek()
            }
        }
    }

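    /// Return the next event and its marker, consuming any event previously
    /// buffered by `peek`.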
    pub fn next(&mut self) -> ParseResult {
        match self.current {
            None => self.parse(),
            Some(_) => Ok(self.current.take().unwrap()),
        }
    }

    fn peek_token(&mut self) -> Result<&Token, ScanError> {
        match self.token {
            None => {
                self.token = Some(self.scan_next_token()?);
                Ok(self.token.as_ref().unwrap())
            }
            Some(ref tok) => Ok(tok),
        }
    }

    fn scan_next_token(&mut self) -> Result<Token, ScanError> {
        let token = self.scanner.next();
        match token {
            None => match self.scanner.get_error() {
                None => Err(ScanError::new(self.scanner.mark(), "unexpected eof")),
                Some(e) => Err(e),
            },
            Some(tok) => Ok(tok),
        }
    }

    fn fetch_token(&mut self) -> Token {
        self.token
            .take()
            .expect("fetch_token needs to be preceded by peek_token")
    }

    fn skip(&mut self) {
        self.token = None;
        //self.peek_token();
    }

    fn pop_state(&mut self) {
        self.state = self.states.pop().unwrap()
    }

    fn push_state(&mut self, state: State) {
        self.states.push(state);
    }

    fn parse(&mut self) -> ParseResult {
        if self.state == State::End {
            return Ok((Event::StreamEnd, self.scanner.mark()));
        }
        let (ev, mark) = self.state_machine()?;
        // println!("EV {:?}", ev);
        Ok((ev, mark))
    }

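    /// Drive the parser to completion, forwarding every event to `recv`.
    /// When `multi` is `true`, every document in the stream is loaded;
    /// otherwise parsing stops after the first document.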
    pub fn load<R: MarkedEventReceiver>(
        &mut self,
        recv: &mut R,
        multi: bool,
    ) -> Result<(), ScanError> {
        if !self.scanner.stream_started() {
            let (ev, mark) = self.next()?;
            assert_eq!(ev, Event::StreamStart);
            recv.on_event(ev, mark);
        }

        if self.scanner.stream_ended() {
            // XXX has parsed?
            recv.on_event(Event::StreamEnd, self.scanner.mark());
            return Ok(());
        }
        loop {
            let (ev, mark) = self.next()?;
            if ev == Event::StreamEnd {
                recv.on_event(ev, mark);
                return Ok(());
            }
            // clear anchors before a new document
            self.anchors.clear();
            self.load_document(ev, mark, recv)?;
            if !multi {
                break;
            }
        }
        Ok(())
    }

    fn load_document<R: MarkedEventReceiver>(
        &mut self,
        first_ev: Event,
        mark: Marker,
        recv: &mut R,
    ) -> Result<(), ScanError> {
        assert_eq!(first_ev, Event::DocumentStart);
        recv.on_event(first_ev, mark);

        let (ev, mark) = self.next()?;
        self.load_node(ev, mark, recv)?;

        // DOCUMENT-END is expected.
        let (ev, mark) = self.next()?;
        assert_eq!(ev, Event::DocumentEnd);
        recv.on_event(ev, mark);

        Ok(())
    }

    fn load_node<R: MarkedEventReceiver>(
        &mut self,
        first_ev: Event,
        mark: Marker,
        recv: &mut R,
    ) -> Result<(), ScanError> {
        match first_ev {
            Event::Alias(..) | Event::Scalar(..) => {
                recv.on_event(first_ev, mark);
                Ok(())
            }
            Event::SequenceStart(_) => {
                recv.on_event(first_ev, mark);
                self.load_sequence(recv)
            }
            Event::MappingStart(_) => {
                recv.on_event(first_ev, mark);
                self.load_mapping(recv)
            }
            _ => {
                println!("UNREACHABLE EVENT: {:?}", first_ev);
                unreachable!();
            }
        }
    }

    fn load_mapping<R: MarkedEventReceiver>(&mut self, recv: &mut R) -> Result<(), ScanError> {
        let (mut key_ev, mut key_mark) = self.next()?;
        while key_ev != Event::MappingEnd {
            // key
            self.load_node(key_ev, key_mark, recv)?;

            // value
            let (ev, mark) = self.next()?;
            self.load_node(ev, mark, recv)?;

            // next event
            let (ev, mark) = self.next()?;
            key_ev = ev;
            key_mark = mark;
        }
        recv.on_event(key_ev, key_mark);
        Ok(())
    }

    fn load_sequence<R: MarkedEventReceiver>(&mut self, recv: &mut R) -> Result<(), ScanError> {
        let (mut ev, mut mark) = self.next()?;
        while ev != Event::SequenceEnd {
            self.load_node(ev, mark, recv)?;

            // next event
            let (next_ev, next_mark) = self.next()?;
            ev = next_ev;
            mark = next_mark;
        }
        recv.on_event(ev, mark);
        Ok(())
    }

    fn state_machine(&mut self) -> ParseResult {
        // let next_tok = self.peek_token()?;
        // println!("cur_state {:?}, next tok: {:?}", self.state, next_tok);
        match self.state {
            State::StreamStart => self.stream_start(),

            State::ImplicitDocumentStart => self.document_start(true),
            State::DocumentStart => self.document_start(false),
            State::DocumentContent => self.document_content(),
            State::DocumentEnd => self.document_end(),

            State::BlockNode => self.parse_node(true, false),
            // State::BlockNodeOrIndentlessSequence => self.parse_node(true, true),
            // State::FlowNode => self.parse_node(false, false),
            State::BlockMappingFirstKey => self.block_mapping_key(true),
            State::BlockMappingKey => self.block_mapping_key(false),
            State::BlockMappingValue => self.block_mapping_value(),

            State::BlockSequenceFirstEntry => self.block_sequence_entry(true),
            State::BlockSequenceEntry => self.block_sequence_entry(false),

            State::FlowSequenceFirstEntry => self.flow_sequence_entry(true),
            State::FlowSequenceEntry => self.flow_sequence_entry(false),

            State::FlowMappingFirstKey => self.flow_mapping_key(true),
            State::FlowMappingKey => self.flow_mapping_key(false),
            State::FlowMappingValue => self.flow_mapping_value(false),

            State::IndentlessSequenceEntry => self.indentless_sequence_entry(),

            State::FlowSequenceEntryMappingKey => self.flow_sequence_entry_mapping_key(),
            State::FlowSequenceEntryMappingValue => self.flow_sequence_entry_mapping_value(),
            State::FlowSequenceEntryMappingEnd => self.flow_sequence_entry_mapping_end(),
            State::FlowMappingEmptyValue => self.flow_mapping_value(true),

            /* impossible */
            State::End => unreachable!(),
        }
    }

    fn stream_start(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(mark, TokenType::StreamStart(_)) => {
                self.state = State::ImplicitDocumentStart;
                self.skip();
                Ok((Event::StreamStart, mark))
            }
            Token(mark, _) => Err(ScanError::new(mark, "did not find expected <stream-start>")),
        }
    }

    fn document_start(&mut self, implicit: bool) -> ParseResult {
        if !implicit {
            while let TokenType::DocumentEnd = self.peek_token()?.1 {
                self.skip();
            }
        }

        match *self.peek_token()? {
            Token(mark, TokenType::StreamEnd) => {
                self.state = State::End;
                self.skip();
                Ok((Event::StreamEnd, mark))
            }
            Token(_, TokenType::VersionDirective(..))
            | Token(_, TokenType::TagDirective(..))
            | Token(_, TokenType::DocumentStart) => {
                // explicit document
                self._explicit_document_start()
            }
            Token(mark, _) if implicit => {
                self.parser_process_directives()?;
                self.push_state(State::DocumentEnd);
                self.state = State::BlockNode;
                Ok((Event::DocumentStart, mark))
            }
            _ => {
                // explicit document
                self._explicit_document_start()
            }
        }
    }

    fn parser_process_directives(&mut self) -> Result<(), ScanError> {
        loop {
            match self.peek_token()?.1 {
                TokenType::VersionDirective(_, _) => {
                    // XXX: according to the spec, an incompatible version
                    // should only produce a warning, not an error
                    //if major != 1 || minor > 2 {
                    //    return Err(ScanError::new(tok.0,
                    //        "found incompatible YAML document"));
                    //}
                }
                TokenType::TagDirective(..) => {
                    // TODO add tag directive
                }
                _ => break,
            }
            self.skip();
        }
        // TODO tag directive
        Ok(())
    }

    fn _explicit_document_start(&mut self) -> ParseResult {
        self.parser_process_directives()?;
        match *self.peek_token()? {
            Token(mark, TokenType::DocumentStart) => {
                self.push_state(State::DocumentEnd);
                self.state = State::DocumentContent;
                self.skip();
                Ok((Event::DocumentStart, mark))
            }
            Token(mark, _) => Err(ScanError::new(
                mark,
                "did not find expected <document start>",
            )),
        }
    }

    fn document_content(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(mark, TokenType::VersionDirective(..))
            | Token(mark, TokenType::TagDirective(..))
            | Token(mark, TokenType::DocumentStart)
            | Token(mark, TokenType::DocumentEnd)
            | Token(mark, TokenType::StreamEnd) => {
                self.pop_state();
                // empty scalar
                Ok((Event::empty_scalar(), mark))
            }
            _ => self.parse_node(true, false),
        }
    }

    fn document_end(&mut self) -> ParseResult {
        let mut _implicit = true;
        let marker: Marker = match *self.peek_token()? {
            Token(mark, TokenType::DocumentEnd) => {
                self.skip();
                _implicit = false;
                mark
            }
            Token(mark, _) => mark,
        };

        // TODO tag handling
        self.state = State::DocumentStart;
        Ok((Event::DocumentEnd, marker))
    }

    fn register_anchor(&mut self, name: String, _: &Marker) -> Result<usize, ScanError> {
        // anchors can be overridden/reused
        // if self.anchors.contains_key(name) {
        //     return Err(ScanError::new(*mark,
        //         "while parsing anchor, found duplicated anchor"));
        // }
        let new_id = self.anchor_id;
        self.anchor_id += 1;
        self.anchors.insert(name, new_id);
        Ok(new_id)
    }

    fn parse_node(&mut self, block: bool, indentless_sequence: bool) -> ParseResult {
        let mut anchor_id = 0;
        let mut tag = None;
        match *self.peek_token()? {
            Token(_, TokenType::Alias(_)) => {
                self.pop_state();
                if let Token(mark, TokenType::Alias(name)) = self.fetch_token() {
                    match self.anchors.get(&name) {
                        None => {
                            return Err(ScanError::new(
                                mark,
                                "while parsing node, found unknown anchor",
                            ))
                        }
                        Some(id) => return Ok((Event::Alias(*id), mark)),
                    }
                } else {
                    unreachable!()
                }
            }
            Token(_, TokenType::Anchor(_)) => {
                if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() {
                    anchor_id = self.register_anchor(name, &mark)?;
                    if let TokenType::Tag(..) = self.peek_token()?.1 {
                        if let tg @ TokenType::Tag(..) = self.fetch_token().1 {
                            tag = Some(tg);
                        } else {
                            unreachable!()
                        }
                    }
                } else {
                    unreachable!()
                }
            }
            Token(_, TokenType::Tag(..)) => {
                if let tg @ TokenType::Tag(..) = self.fetch_token().1 {
                    tag = Some(tg);
                    if let TokenType::Anchor(_) = self.peek_token()?.1 {
                        if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() {
                            anchor_id = self.register_anchor(name, &mark)?;
                        } else {
                            unreachable!()
                        }
                    }
                } else {
                    unreachable!()
                }
            }
            _ => {}
        }
        match *self.peek_token()? {
            Token(mark, TokenType::BlockEntry) if indentless_sequence => {
                self.state = State::IndentlessSequenceEntry;
                Ok((Event::SequenceStart(anchor_id), mark))
            }
            Token(_, TokenType::Scalar(..)) => {
                self.pop_state();
                if let Token(mark, TokenType::Scalar(style, v)) = self.fetch_token() {
                    Ok((Event::Scalar(v, style, anchor_id, tag), mark))
                } else {
                    unreachable!()
                }
            }
            Token(mark, TokenType::FlowSequenceStart) => {
                self.state = State::FlowSequenceFirstEntry;
                Ok((Event::SequenceStart(anchor_id), mark))
            }
            Token(mark, TokenType::FlowMappingStart) => {
                self.state = State::FlowMappingFirstKey;
                Ok((Event::MappingStart(anchor_id), mark))
            }
            Token(mark, TokenType::BlockSequenceStart) if block => {
                self.state = State::BlockSequenceFirstEntry;
                Ok((Event::SequenceStart(anchor_id), mark))
            }
            Token(mark, TokenType::BlockMappingStart) if block => {
                self.state = State::BlockMappingFirstKey;
                Ok((Event::MappingStart(anchor_id), mark))
            }
            // ex 7.2, an empty scalar can follow a secondary tag
            Token(mark, _) if tag.is_some() || anchor_id > 0 => {
                self.pop_state();
                Ok((Event::empty_scalar_with_anchor(anchor_id, tag), mark))
            }
            Token(mark, _) => Err(ScanError::new(
                mark,
                "while parsing a node, did not find expected node content",
            )),
        }
    }

    fn block_mapping_key(&mut self, first: bool) -> ParseResult {
        // skip BlockMappingStart
        if first {
            let _ = self.peek_token()?;
            //self.marks.push(tok.0);
            self.skip();
        }
        match *self.peek_token()? {
            Token(_, TokenType::Key) => {
                self.skip();
                match *self.peek_token()? {
                    Token(mark, TokenType::Key)
                    | Token(mark, TokenType::Value)
                    | Token(mark, TokenType::BlockEnd) => {
                        self.state = State::BlockMappingValue;
                        // empty scalar
                        Ok((Event::empty_scalar(), mark))
                    }
                    _ => {
                        self.push_state(State::BlockMappingValue);
                        self.parse_node(true, true)
                    }
                }
            }
            // XXX(chenyh): libyaml fails to parse spec 1.2, example 8.18
            Token(mark, TokenType::Value) => {
                self.state = State::BlockMappingValue;
                Ok((Event::empty_scalar(), mark))
            }
            Token(mark, TokenType::BlockEnd) => {
                self.pop_state();
                self.skip();
                Ok((Event::MappingEnd, mark))
            }
            Token(mark, _) => Err(ScanError::new(
                mark,
                "while parsing a block mapping, did not find expected key",
            )),
        }
    }

    fn block_mapping_value(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(_, TokenType::Value) => {
                self.skip();
                match *self.peek_token()? {
                    Token(mark, TokenType::Key)
                    | Token(mark, TokenType::Value)
                    | Token(mark, TokenType::BlockEnd) => {
                        self.state = State::BlockMappingKey;
                        // empty scalar
                        Ok((Event::empty_scalar(), mark))
                    }
                    _ => {
                        self.push_state(State::BlockMappingKey);
                        self.parse_node(true, true)
                    }
                }
            }
            Token(mark, _) => {
                self.state = State::BlockMappingKey;
                // empty scalar
                Ok((Event::empty_scalar(), mark))
            }
        }
    }

    fn flow_mapping_key(&mut self, first: bool) -> ParseResult {
        if first {
            let _ = self.peek_token()?;
            self.skip();
        }
        let marker: Marker = {
            match *self.peek_token()? {
                Token(mark, TokenType::FlowMappingEnd) => mark,
                Token(mark, _) => {
                    if !first {
                        match *self.peek_token()? {
                            Token(_, TokenType::FlowEntry) => self.skip(),
                            Token(mark, _) => {
                                return Err(ScanError::new(
                                    mark,
                                    "while parsing a flow mapping, did not find expected ',' or '}'",
                                ))
                            }
                        }
                    }

                    match *self.peek_token()? {
                        Token(_, TokenType::Key) => {
                            self.skip();
                            match *self.peek_token()? {
                                Token(mark, TokenType::Value)
                                | Token(mark, TokenType::FlowEntry)
                                | Token(mark, TokenType::FlowMappingEnd) => {
                                    self.state = State::FlowMappingValue;
                                    return Ok((Event::empty_scalar(), mark));
                                }
                                _ => {
                                    self.push_state(State::FlowMappingValue);
                                    return self.parse_node(false, false);
                                }
                            }
                        }
                        Token(marker, TokenType::Value) => {
                            self.state = State::FlowMappingValue;
                            return Ok((Event::empty_scalar(), marker));
                        }
                        Token(_, TokenType::FlowMappingEnd) => (),
                        _ => {
                            self.push_state(State::FlowMappingEmptyValue);
                            return self.parse_node(false, false);
                        }
                    }

                    mark
                }
            }
        };

        self.pop_state();
        self.skip();
        Ok((Event::MappingEnd, marker))
    }

    fn flow_mapping_value(&mut self, empty: bool) -> ParseResult {
        let mark: Marker = {
            if empty {
                let Token(mark, _) = *self.peek_token()?;
                self.state = State::FlowMappingKey;
                return Ok((Event::empty_scalar(), mark));
            } else {
                match *self.peek_token()? {
                    Token(marker, TokenType::Value) => {
                        self.skip();
                        match self.peek_token()?.1 {
                            TokenType::FlowEntry | TokenType::FlowMappingEnd => {}
                            _ => {
                                self.push_state(State::FlowMappingKey);
                                return self.parse_node(false, false);
                            }
                        }
                        marker
                    }
                    Token(marker, _) => marker,
                }
            }
        };

        self.state = State::FlowMappingKey;
        Ok((Event::empty_scalar(), mark))
    }

    fn flow_sequence_entry(&mut self, first: bool) -> ParseResult {
        // skip FlowSequenceStart
        if first {
            let _ = self.peek_token()?;
            //self.marks.push(tok.0);
            self.skip();
        }
        match *self.peek_token()? {
            Token(mark, TokenType::FlowSequenceEnd) => {
                self.pop_state();
                self.skip();
                return Ok((Event::SequenceEnd, mark));
            }
            Token(_, TokenType::FlowEntry) if !first => {
                self.skip();
            }
            Token(mark, _) if !first => {
                return Err(ScanError::new(
                    mark,
                    "while parsing a flow sequence, expected ',' or ']'",
                ));
            }
            _ => { /* next */ }
        }
        match *self.peek_token()? {
            Token(mark, TokenType::FlowSequenceEnd) => {
                self.pop_state();
                self.skip();
                Ok((Event::SequenceEnd, mark))
            }
            Token(mark, TokenType::Key) => {
                self.state = State::FlowSequenceEntryMappingKey;
                self.skip();
                Ok((Event::MappingStart(0), mark))
            }
            _ => {
                self.push_state(State::FlowSequenceEntry);
                self.parse_node(false, false)
            }
        }
    }

    fn indentless_sequence_entry(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(_, TokenType::BlockEntry) => (),
            Token(mark, _) => {
                self.pop_state();
                return Ok((Event::SequenceEnd, mark));
            }
        }
        self.skip();
        match *self.peek_token()? {
            Token(mark, TokenType::BlockEntry)
            | Token(mark, TokenType::Key)
            | Token(mark, TokenType::Value)
            | Token(mark, TokenType::BlockEnd) => {
                self.state = State::IndentlessSequenceEntry;
                Ok((Event::empty_scalar(), mark))
            }
            _ => {
                self.push_state(State::IndentlessSequenceEntry);
                self.parse_node(true, false)
            }
        }
    }

    fn block_sequence_entry(&mut self, first: bool) -> ParseResult {
        // BLOCK-SEQUENCE-START
        if first {
            let _ = self.peek_token()?;
            //self.marks.push(tok.0);
            self.skip();
        }
        match *self.peek_token()? {
            Token(mark, TokenType::BlockEnd) => {
                self.pop_state();
                self.skip();
                Ok((Event::SequenceEnd, mark))
            }
            Token(_, TokenType::BlockEntry) => {
                self.skip();
                match *self.peek_token()? {
                    Token(mark, TokenType::BlockEntry) | Token(mark, TokenType::BlockEnd) => {
                        self.state = State::BlockSequenceEntry;
                        Ok((Event::empty_scalar(), mark))
                    }
                    _ => {
                        self.push_state(State::BlockSequenceEntry);
                        self.parse_node(true, false)
                    }
                }
            }
            Token(mark, _) => Err(ScanError::new(
                mark,
                "while parsing a block collection, did not find expected '-' indicator",
            )),
        }
    }

    fn flow_sequence_entry_mapping_key(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(mark, TokenType::Value)
            | Token(mark, TokenType::FlowEntry)
            | Token(mark, TokenType::FlowSequenceEnd) => {
                self.skip();
                self.state = State::FlowSequenceEntryMappingValue;
                Ok((Event::empty_scalar(), mark))
            }
            _ => {
                self.push_state(State::FlowSequenceEntryMappingValue);
                self.parse_node(false, false)
            }
        }
    }

    fn flow_sequence_entry_mapping_value(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(_, TokenType::Value) => {
                self.skip();
                self.state = State::FlowSequenceEntryMappingValue;
                match *self.peek_token()? {
                    Token(mark, TokenType::FlowEntry) | Token(mark, TokenType::FlowSequenceEnd) => {
                        self.state = State::FlowSequenceEntryMappingEnd;
                        Ok((Event::empty_scalar(), mark))
                    }
                    _ => {
                        self.push_state(State::FlowSequenceEntryMappingEnd);
                        self.parse_node(false, false)
                    }
                }
            }
            Token(mark, _) => {
                self.state = State::FlowSequenceEntryMappingEnd;
                Ok((Event::empty_scalar(), mark))
            }
        }
    }

    fn flow_sequence_entry_mapping_end(&mut self) -> ParseResult {
        self.state = State::FlowSequenceEntry;
        Ok((Event::MappingEnd, self.scanner.mark()))
    }
}

#[cfg(test)]
mod test {
    use super::{Event, Parser};

    #[test]
    fn test_peek_eq_parse() {
        let s = "
a0 bb: val
a1: &x
    b1: 4
    b2: d
a2: 4
a3: [1, 2, 3]
a4:
    - [a1, a2]
    - 2
a5: *x
";
        let mut p = Parser::new(s.chars());
        while {
            let event_peek = p.peek().unwrap().clone();
            let event = p.next().unwrap();
            assert_eq!(event, event_peek);
            event.0 != Event::StreamEnd
        } {}
    }
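
    // A hedged usage sketch: a minimal `EventReceiver` that merely collects
    // events, showing how the receiver traits above are intended to be used
    // together with `Parser::load`. The `EventCollector` type and this test
    // are illustrative additions, not part of the original test suite.
    use super::EventReceiver;

    struct EventCollector {
        events: Vec<Event>,
    }

    impl EventReceiver for EventCollector {
        fn on_event(&mut self, ev: Event) {
            self.events.push(ev);
        }
    }

    #[test]
    fn test_event_receiver_collects_stream() {
        let s = "a: 1";
        let mut collector = EventCollector { events: Vec::new() };
        let mut p = Parser::new(s.chars());
        p.load(&mut collector, true).unwrap();

        // The stream should be bracketed by StreamStart/StreamEnd and contain
        // a mapping holding the single key/value pair.
        assert_eq!(collector.events.first(), Some(&Event::StreamStart));
        assert_eq!(collector.events.last(), Some(&Event::StreamEnd));
        assert!(collector.events.contains(&Event::MappingStart(0)));
        assert!(collector.events.contains(&Event::MappingEnd));
    }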
}