env/lib/python3.9/site-packages/ruamel/yaml/parser.py @ 0:4f3585e2f14b (draft, default, tip)
"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
author: shellac
date: Mon, 22 Mar 2021 18:12:50 +0000
comparison: -1:000000000000 -> 0:4f3585e2f14b (file added in this revision)
# coding: utf-8

from __future__ import absolute_import

# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream            ::= STREAM-START implicit_document? explicit_document*
#                       STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
#                       ALIAS
#                       | properties (block_content | indentless_block_sequence)?
#                       | block_content
#                       | indentless_block_sequence
# block_node        ::= ALIAS
#                       | properties block_content?
#                       | block_content
# flow_node         ::= ALIAS
#                       | properties flow_content?
#                       | flow_content
# properties        ::= TAG ANCHOR? | ANCHOR TAG?
# block_content     ::= block_collection | flow_collection | SCALAR
# flow_content      ::= flow_collection | SCALAR
# block_collection  ::= block_sequence | block_mapping
# flow_collection   ::= flow_sequence | flow_mapping
# block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
#                       BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping     ::= BLOCK-MAPPING-START
#                       ((KEY block_node_or_indentless_sequence?)?
#                       (VALUE block_node_or_indentless_sequence?)?)*
#                       BLOCK-END
# flow_sequence     ::= FLOW-SEQUENCE-START
#                       (flow_sequence_entry FLOW-ENTRY)*
#                       flow_sequence_entry?
#                       FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping      ::= FLOW-MAPPING-START
#                       (flow_mapping_entry FLOW-ENTRY)*
#                       flow_mapping_entry?
#                       FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START
#               BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START
#                  FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR
#               BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START
#               FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { BLOCK-ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
#                        FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
#                       FLOW-MAPPING-START KEY }

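# Illustration of the grammar above: the event stream produced for a one-key
# document. A minimal sketch, assuming the YAML(typ='rt').parse() debug entry
# point of this ruamel.yaml vintage wires this Parser in; kept commented out
# so importing this module stays side-effect free.
#
#     import io
#     from ruamel.yaml import YAML
#
#     for event in YAML(typ='rt').parse(io.StringIO(u"a: 1\n")):
#         print(type(event).__name__)
#
# would print, in order: StreamStartEvent, DocumentStartEvent,
# MappingStartEvent, ScalarEvent (a), ScalarEvent (1), MappingEndEvent,
# DocumentEndEvent, StreamEndEvent.
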
# The imports below need the full package path: pkg_resources tries to load
# parser.py while handling __init__.py, only to do nothing with the package
# afterwards, and Jython needs the explicit path as well.


from ruamel.yaml.error import MarkedYAMLError
from ruamel.yaml.tokens import *  # NOQA
from ruamel.yaml.events import *  # NOQA
from ruamel.yaml.scanner import Scanner, RoundTripScanner, ScannerError  # NOQA
from ruamel.yaml.compat import utf8, nprint, nprintf  # NOQA

if False:  # MYPY
    from typing import Any, Dict, Optional, List  # NOQA

__all__ = ['Parser', 'RoundTripParser', 'ParserError']


class ParserError(MarkedYAMLError):
    pass


class Parser(object):
    # Since writing a recursive descent parser is a straightforward task, we
    # do not give many comments here.

    DEFAULT_TAGS = {u'!': u'!', u'!!': u'tag:yaml.org,2002:'}

    def __init__(self, loader):
        # type: (Any) -> None
        self.loader = loader
        if self.loader is not None and getattr(self.loader, '_parser', None) is None:
            self.loader._parser = self
        self.reset_parser()

    def reset_parser(self):
        # type: () -> None
        # Reset the state attributes (to clear self-references)
        self.current_event = None
        self.tag_handles = {}  # type: Dict[Any, Any]
        self.states = []  # type: List[Any]
        self.marks = []  # type: List[Any]
        self.state = self.parse_stream_start  # type: Any

    def dispose(self):
        # type: () -> None
        self.reset_parser()

    @property
    def scanner(self):
        # type: () -> Any
        if hasattr(self.loader, 'typ'):
            return self.loader.scanner
        return self.loader._scanner

    @property
    def resolver(self):
        # type: () -> Any
        if hasattr(self.loader, 'typ'):
            return self.loader.resolver
        return self.loader._resolver

    def check_event(self, *choices):
        # type: (Any) -> bool
        # Check the type of the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        if self.current_event is not None:
            if not choices:
                return True
            for choice in choices:
                if isinstance(self.current_event, choice):
                    return True
        return False

    def peek_event(self):
        # type: () -> Any
        # Get the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        return self.current_event

    def get_event(self):
        # type: () -> Any
        # Get the next event and proceed further.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        value = self.current_event
        self.current_event = None
        return value

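    # Illustration: how a consumer such as the Composer typically drives the
    # three-method API above -- check_event() to test the next event's type,
    # peek_event() to look ahead, get_event() to consume. A hypothetical
    # sketch assuming `parser` is a fully wired-up Parser instance (e.g. via
    # a loader); commented out on purpose:
    #
    #     scalars = 0
    #     while parser.check_event():              # lazily runs self.state()
    #         if parser.check_event(ScalarEvent):  # type test, no consumption
    #             scalars += 1
    #         parser.get_event()                   # consume and advance
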
    # stream            ::= STREAM-START implicit_document? explicit_document*
    #                       STREAM-END
    # implicit_document ::= block_node DOCUMENT-END*
    # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*

    def parse_stream_start(self):
        # type: () -> Any
        # Parse the stream start.
        token = self.scanner.get_token()
        token.move_comment(self.scanner.peek_token())
        event = StreamStartEvent(token.start_mark, token.end_mark, encoding=token.encoding)

        # Prepare the next state.
        self.state = self.parse_implicit_document_start

        return event

    def parse_implicit_document_start(self):
        # type: () -> Any
        # Parse an implicit document.
        if not self.scanner.check_token(DirectiveToken, DocumentStartToken, StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.scanner.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark, explicit=False)

            # Prepare the next state.
            self.states.append(self.parse_document_end)
            self.state = self.parse_block_node

            return event

        else:
            return self.parse_document_start()

    def parse_document_start(self):
        # type: () -> Any
        # Parse any extra document end indicators.
        while self.scanner.check_token(DocumentEndToken):
            self.scanner.get_token()
        # Parse an explicit document.
        if not self.scanner.check_token(StreamEndToken):
            token = self.scanner.peek_token()
            start_mark = token.start_mark
            version, tags = self.process_directives()
            if not self.scanner.check_token(DocumentStartToken):
                raise ParserError(
                    None,
                    None,
                    "expected '<document start>', but found %r" % self.scanner.peek_token().id,
                    self.scanner.peek_token().start_mark,
                )
            token = self.scanner.get_token()
            end_mark = token.end_mark
            # if self.loader is not None and \
            #    end_mark.line != self.scanner.peek_token().start_mark.line:
            #     self.loader.scalar_after_indicator = False
            event = DocumentStartEvent(
                start_mark, end_mark, explicit=True, version=version, tags=tags
            )  # type: Any
            self.states.append(self.parse_document_end)
            self.state = self.parse_document_content
        else:
            # Parse the end of the stream.
            token = self.scanner.get_token()
            event = StreamEndEvent(token.start_mark, token.end_mark, comment=token.comment)
            assert not self.states
            assert not self.marks
            self.state = None
        return event

    def parse_document_end(self):
        # type: () -> Any
        # Parse the document end.
        token = self.scanner.peek_token()
        start_mark = end_mark = token.start_mark
        explicit = False
        if self.scanner.check_token(DocumentEndToken):
            token = self.scanner.get_token()
            end_mark = token.end_mark
            explicit = True
        event = DocumentEndEvent(start_mark, end_mark, explicit=explicit)

        # Prepare the next state.
        if self.resolver.processing_version == (1, 1):
            self.state = self.parse_document_start
        else:
            self.state = self.parse_implicit_document_start

        return event

    def parse_document_content(self):
        # type: () -> Any
        if self.scanner.check_token(
            DirectiveToken, DocumentStartToken, DocumentEndToken, StreamEndToken
        ):
            event = self.process_empty_scalar(self.scanner.peek_token().start_mark)
            self.state = self.states.pop()
            return event
        else:
            return self.parse_block_node()

    def process_directives(self):
        # type: () -> Any
        yaml_version = None
        self.tag_handles = {}
        while self.scanner.check_token(DirectiveToken):
            token = self.scanner.get_token()
            if token.name == u'YAML':
                if yaml_version is not None:
                    raise ParserError(
                        None, None, 'found duplicate YAML directive', token.start_mark
                    )
                major, minor = token.value
                if major != 1:
                    raise ParserError(
                        None,
                        None,
                        'found incompatible YAML document (version 1.* is required)',
                        token.start_mark,
                    )
                yaml_version = token.value
            elif token.name == u'TAG':
                handle, prefix = token.value
                if handle in self.tag_handles:
                    raise ParserError(
                        None, None, 'duplicate tag handle %r' % utf8(handle), token.start_mark
                    )
                self.tag_handles[handle] = prefix
        if bool(self.tag_handles):
            value = yaml_version, self.tag_handles.copy()  # type: Any
        else:
            value = yaml_version, None
        if self.loader is not None and hasattr(self.loader, 'tags'):
            self.loader.version = yaml_version
            if self.loader.tags is None:
                self.loader.tags = {}
            for k in self.tag_handles:
                self.loader.tags[k] = self.tag_handles[k]
        for key in self.DEFAULT_TAGS:
            if key not in self.tag_handles:
                self.tag_handles[key] = self.DEFAULT_TAGS[key]
        return value

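    # Illustration of process_directives() on an explicit document such as
    #
    #     %YAML 1.1
    #     %TAG !e! tag:example.com,2021:
    #     --- !e!widget
    #     name: x
    #
    # (the !e! prefix is an assumed example value, not anything defined by
    # this module): the method returns ((1, 1), {u'!e!': u'tag:example.com,2021:'});
    # the DEFAULT_TAGS entries are merged into self.tag_handles only after
    # that copy is taken, and parse_node() later expands !e!widget through
    # transform_tag() into u'tag:example.com,2021:widget'.
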
    # block_node_or_indentless_sequence ::= ALIAS
    #               | properties (block_content | indentless_block_sequence)?
    #               | block_content
    #               | indentless_block_sequence
    # block_node    ::= ALIAS
    #                   | properties block_content?
    #                   | block_content
    # flow_node     ::= ALIAS
    #                   | properties flow_content?
    #                   | flow_content
    # properties        ::= TAG ANCHOR? | ANCHOR TAG?
    # block_content     ::= block_collection | flow_collection | SCALAR
    # flow_content      ::= flow_collection | SCALAR
    # block_collection  ::= block_sequence | block_mapping
    # flow_collection   ::= flow_sequence | flow_mapping

    def parse_block_node(self):
        # type: () -> Any
        return self.parse_node(block=True)

    def parse_flow_node(self):
        # type: () -> Any
        return self.parse_node()

    def parse_block_node_or_indentless_sequence(self):
        # type: () -> Any
        return self.parse_node(block=True, indentless_sequence=True)

    def transform_tag(self, handle, suffix):
        # type: (Any, Any) -> Any
        return self.tag_handles[handle] + suffix

    def parse_node(self, block=False, indentless_sequence=False):
        # type: (bool, bool) -> Any
        if self.scanner.check_token(AliasToken):
            token = self.scanner.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)  # type: Any
            self.state = self.states.pop()
            return event

        anchor = None
        tag = None
        start_mark = end_mark = tag_mark = None
        if self.scanner.check_token(AnchorToken):
            token = self.scanner.get_token()
            start_mark = token.start_mark
            end_mark = token.end_mark
            anchor = token.value
            if self.scanner.check_token(TagToken):
                token = self.scanner.get_token()
                tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
        elif self.scanner.check_token(TagToken):
            token = self.scanner.get_token()
            start_mark = tag_mark = token.start_mark
            end_mark = token.end_mark
            tag = token.value
            if self.scanner.check_token(AnchorToken):
                token = self.scanner.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
        if tag is not None:
            handle, suffix = tag
            if handle is not None:
                if handle not in self.tag_handles:
                    raise ParserError(
                        'while parsing a node',
                        start_mark,
                        'found undefined tag handle %r' % utf8(handle),
                        tag_mark,
                    )
                tag = self.transform_tag(handle, suffix)
            else:
                tag = suffix
        # if tag == u'!':
        #     raise ParserError("while parsing a node", start_mark,
        #             "found non-specific tag '!'", tag_mark,
        #             "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag'
        #             and share your opinion.")
        if start_mark is None:
            start_mark = end_mark = self.scanner.peek_token().start_mark
        event = None
        implicit = tag is None or tag == u'!'
        if indentless_sequence and self.scanner.check_token(BlockEntryToken):
            comment = None
            pt = self.scanner.peek_token()
            if pt.comment and pt.comment[0]:
                comment = [pt.comment[0], []]
                pt.comment[0] = None
            end_mark = self.scanner.peek_token().end_mark
            event = SequenceStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_indentless_sequence_entry
            return event

        if self.scanner.check_token(ScalarToken):
            token = self.scanner.get_token()
            # self.scanner.peek_token_same_line_comment(token)
            end_mark = token.end_mark
            if (token.plain and tag is None) or tag == u'!':
                implicit = (True, False)
            elif tag is None:
                implicit = (False, True)
            else:
                implicit = (False, False)
            # nprint('se', token.value, token.comment)
            event = ScalarEvent(
                anchor,
                tag,
                implicit,
                token.value,
                start_mark,
                end_mark,
                style=token.style,
                comment=token.comment,
            )
            self.state = self.states.pop()
        elif self.scanner.check_token(FlowSequenceStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = SequenceStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_sequence_first_entry
        elif self.scanner.check_token(FlowMappingStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = MappingStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_mapping_first_key
        elif block and self.scanner.check_token(BlockSequenceStartToken):
            end_mark = self.scanner.peek_token().start_mark
            # should inserting the comment be dependent on the
            # indentation?
            pt = self.scanner.peek_token()
            comment = pt.comment
            # nprint('pt0', type(pt))
            if comment is None or comment[1] is None:
                comment = pt.split_comment()
            # nprint('pt1', comment)
            event = SequenceStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_block_sequence_first_entry
        elif block and self.scanner.check_token(BlockMappingStartToken):
            end_mark = self.scanner.peek_token().start_mark
            comment = self.scanner.peek_token().comment
            event = MappingStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_block_mapping_first_key
        elif anchor is not None or tag is not None:
            # Empty scalars are allowed even if a tag or an anchor is
            # specified.
            event = ScalarEvent(anchor, tag, (implicit, False), "", start_mark, end_mark)
            self.state = self.states.pop()
        else:
            if block:
                node = 'block'
            else:
                node = 'flow'
            token = self.scanner.peek_token()
            raise ParserError(
                'while parsing a %s node' % node,
                start_mark,
                'expected the node content, but found %r' % token.id,
                token.start_mark,
            )
        return event

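    # Illustration of the properties handling in parse_node() above: for a
    # node written as
    #
    #     &anch !!str scalar
    #
    # the anchor token yields anchor u'anch', the tag token yields the pair
    # (u'!!', u'str'), which transform_tag() expands via DEFAULT_TAGS to
    # u'tag:yaml.org,2002:str', and the resulting ScalarEvent carries
    # implicit=(False, False) because an explicit, non-'!' tag was given.
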
    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
    #                    BLOCK-END

    def parse_block_sequence_first_entry(self):
        # type: () -> Any
        token = self.scanner.get_token()
        # move any comment from start token
        # token.move_comment(self.scanner.peek_token())
        self.marks.append(token.start_mark)
        return self.parse_block_sequence_entry()

    def parse_block_sequence_entry(self):
        # type: () -> Any
        if self.scanner.check_token(BlockEntryToken):
            token = self.scanner.get_token()
            token.move_comment(self.scanner.peek_token())
            if not self.scanner.check_token(BlockEntryToken, BlockEndToken):
                self.states.append(self.parse_block_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_block_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        if not self.scanner.check_token(BlockEndToken):
            token = self.scanner.peek_token()
            raise ParserError(
                'while parsing a block collection',
                self.marks[-1],
                'expected <block end>, but found %r' % token.id,
                token.start_mark,
            )
        token = self.scanner.get_token()  # BlockEndToken
        event = SequenceEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

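    # Illustration for the two methods above: a block sequence such as
    #
    #     - one
    #     - two
    #
    # arrives as BLOCK-SEQUENCE-START, BLOCK-ENTRY, SCALAR(one), BLOCK-ENTRY,
    # SCALAR(two), BLOCK-END and is emitted as SequenceStartEvent,
    # ScalarEvent(one), ScalarEvent(two), SequenceEndEvent; a lone `-` entry
    # would instead produce an empty ScalarEvent via process_empty_scalar().
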
    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+

    # an indentless sequence: the entries sit at the same indentation level
    # as the mapping key they belong to, e.g.:
    # sequence:
    # - entry
    # - nested

    def parse_indentless_sequence_entry(self):
        # type: () -> Any
        if self.scanner.check_token(BlockEntryToken):
            token = self.scanner.get_token()
            token.move_comment(self.scanner.peek_token())
            if not self.scanner.check_token(
                BlockEntryToken, KeyToken, ValueToken, BlockEndToken
            ):
                self.states.append(self.parse_indentless_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_indentless_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        token = self.scanner.peek_token()
        event = SequenceEndEvent(token.start_mark, token.start_mark, comment=token.comment)
        self.state = self.states.pop()
        return event

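    # Illustration: for the indentless form sketched above the scanner emits
    # no BLOCK-SEQUENCE-START/BLOCK-END pair, only BLOCK-ENTRY tokens, which
    # is why parse_indentless_sequence_entry() ends the sequence on the first
    # KEY/VALUE/BLOCK-END token it sees and reuses the peeked token's start
    # mark for both marks of the SequenceEndEvent.
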
    # block_mapping ::= BLOCK-MAPPING-START
    #                   ((KEY block_node_or_indentless_sequence?)?
    #                   (VALUE block_node_or_indentless_sequence?)?)*
    #                   BLOCK-END

    def parse_block_mapping_first_key(self):
        # type: () -> Any
        token = self.scanner.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_mapping_key()

    def parse_block_mapping_key(self):
        # type: () -> Any
        if self.scanner.check_token(KeyToken):
            token = self.scanner.get_token()
            token.move_comment(self.scanner.peek_token())
            if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_value)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_value
                return self.process_empty_scalar(token.end_mark)
        if self.resolver.processing_version > (1, 1) and self.scanner.check_token(ValueToken):
            self.state = self.parse_block_mapping_value
            return self.process_empty_scalar(self.scanner.peek_token().start_mark)
        if not self.scanner.check_token(BlockEndToken):
            token = self.scanner.peek_token()
            raise ParserError(
                'while parsing a block mapping',
                self.marks[-1],
                'expected <block end>, but found %r' % token.id,
                token.start_mark,
            )
        token = self.scanner.get_token()
        token.move_comment(self.scanner.peek_token())
        event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_block_mapping_value(self):
        # type: () -> Any
        if self.scanner.check_token(ValueToken):
            token = self.scanner.get_token()
            # the value token might have a post comment; move it to e.g. the
            # following block
            if self.scanner.check_token(ValueToken):
                token.move_comment(self.scanner.peek_token())
            else:
                if not self.scanner.check_token(KeyToken):
                    token.move_comment(self.scanner.peek_token(), empty=True)
                # else: empty value for this key cannot move token.comment
            if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_key)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_key
                comment = token.comment
                if comment is None:
                    token = self.scanner.peek_token()
                    comment = token.comment
                    if comment:
                        token._comment = [None, comment[1]]
                        comment = [comment[0], None]
                return self.process_empty_scalar(token.end_mark, comment=comment)
        else:
            self.state = self.parse_block_mapping_key
            token = self.scanner.peek_token()
            return self.process_empty_scalar(token.start_mark)

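    # Illustration for parse_block_mapping_value() above: in
    #
    #     a:
    #     b: 1
    #
    # the VALUE token of `a` is immediately followed by another KEY token, so
    # `a` gets the empty ScalarEvent from process_empty_scalar(), while `b`
    # gets the ScalarEvent for `1` via
    # parse_block_node_or_indentless_sequence().
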
    # flow_sequence ::= FLOW-SEQUENCE-START
    #                   (flow_sequence_entry FLOW-ENTRY)*
    #                   flow_sequence_entry?
    #                   FLOW-SEQUENCE-END
    # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
    #
    # Note that while the production rules for flow_sequence_entry and
    # flow_mapping_entry are the same, their interpretations are different.
    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
    # generates an inline mapping (set syntax).

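    # Illustration of the note above: the flow sequence
    #
    #     [a: 1, b]
    #
    # produces SequenceStartEvent, MappingStartEvent (flow, implicit),
    # ScalarEvent(a), ScalarEvent(1), MappingEndEvent, ScalarEvent(b),
    # SequenceEndEvent -- the KEY/VALUE pair becomes a single-pair inline
    # mapping inside the sequence.
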
    def parse_flow_sequence_first_entry(self):
        # type: () -> Any
        token = self.scanner.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_sequence_entry(first=True)

    def parse_flow_sequence_entry(self, first=False):
        # type: (bool) -> Any
        if not self.scanner.check_token(FlowSequenceEndToken):
            if not first:
                if self.scanner.check_token(FlowEntryToken):
                    self.scanner.get_token()
                else:
                    token = self.scanner.peek_token()
                    raise ParserError(
                        'while parsing a flow sequence',
                        self.marks[-1],
                        "expected ',' or ']', but got %r" % token.id,
                        token.start_mark,
                    )

            if self.scanner.check_token(KeyToken):
                token = self.scanner.peek_token()
                event = MappingStartEvent(
                    None, None, True, token.start_mark, token.end_mark, flow_style=True
                )  # type: Any
                self.state = self.parse_flow_sequence_entry_mapping_key
                return event
            elif not self.scanner.check_token(FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry)
                return self.parse_flow_node()
        token = self.scanner.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_sequence_entry_mapping_key(self):
        # type: () -> Any
        token = self.scanner.get_token()
        if not self.scanner.check_token(ValueToken, FlowEntryToken, FlowSequenceEndToken):
            self.states.append(self.parse_flow_sequence_entry_mapping_value)
            return self.parse_flow_node()
        else:
            self.state = self.parse_flow_sequence_entry_mapping_value
            return self.process_empty_scalar(token.end_mark)

    def parse_flow_sequence_entry_mapping_value(self):
        # type: () -> Any
        if self.scanner.check_token(ValueToken):
            token = self.scanner.get_token()
            if not self.scanner.check_token(FlowEntryToken, FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry_mapping_end)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_sequence_entry_mapping_end
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_sequence_entry_mapping_end
            token = self.scanner.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_sequence_entry_mapping_end(self):
        # type: () -> Any
        self.state = self.parse_flow_sequence_entry
        token = self.scanner.peek_token()
        return MappingEndEvent(token.start_mark, token.start_mark)

    # flow_mapping ::= FLOW-MAPPING-START
    #                  (flow_mapping_entry FLOW-ENTRY)*
    #                  flow_mapping_entry?
    #                  FLOW-MAPPING-END
    # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?

    def parse_flow_mapping_first_key(self):
        # type: () -> Any
        token = self.scanner.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_mapping_key(first=True)

    def parse_flow_mapping_key(self, first=False):
        # type: (Any) -> Any
        if not self.scanner.check_token(FlowMappingEndToken):
            if not first:
                if self.scanner.check_token(FlowEntryToken):
                    self.scanner.get_token()
                else:
                    token = self.scanner.peek_token()
                    raise ParserError(
                        'while parsing a flow mapping',
                        self.marks[-1],
                        "expected ',' or '}', but got %r" % token.id,
                        token.start_mark,
                    )
            if self.scanner.check_token(KeyToken):
                token = self.scanner.get_token()
                if not self.scanner.check_token(
                    ValueToken, FlowEntryToken, FlowMappingEndToken
                ):
                    self.states.append(self.parse_flow_mapping_value)
                    return self.parse_flow_node()
                else:
                    self.state = self.parse_flow_mapping_value
                    return self.process_empty_scalar(token.end_mark)
            elif self.resolver.processing_version > (1, 1) and self.scanner.check_token(
                ValueToken
            ):
                self.state = self.parse_flow_mapping_value
                return self.process_empty_scalar(self.scanner.peek_token().end_mark)
            elif not self.scanner.check_token(FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_empty_value)
                return self.parse_flow_node()
        token = self.scanner.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

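    # Illustration for parse_flow_mapping_key() above: in
    #
    #     {a: 1, b}
    #
    # the entry `b` has no VALUE token, so parse_flow_mapping_empty_value()
    # below supplies an empty ScalarEvent for it; and with a YAML 1.2
    # resolver (processing_version > (1, 1)), a bare value such as {: 1} is
    # accepted as well, with the empty key coming from the ValueToken branch.
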
    def parse_flow_mapping_value(self):
        # type: () -> Any
        if self.scanner.check_token(ValueToken):
            token = self.scanner.get_token()
            if not self.scanner.check_token(FlowEntryToken, FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_key)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_mapping_key
            token = self.scanner.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_mapping_empty_value(self):
        # type: () -> Any
        self.state = self.parse_flow_mapping_key
        return self.process_empty_scalar(self.scanner.peek_token().start_mark)

    def process_empty_scalar(self, mark, comment=None):
        # type: (Any, Any) -> Any
        return ScalarEvent(None, None, (True, False), "", mark, mark, comment=comment)


class RoundTripParser(Parser):
    """roundtrip is a safe loader that wants to see the unmangled tags"""

    def transform_tag(self, handle, suffix):
        # type: (Any, Any) -> Any
        # return self.tag_handles[handle] + suffix
        if handle == '!!' and suffix in (
            u'null',
            u'bool',
            u'int',
            u'float',
            u'binary',
            u'timestamp',
            u'omap',
            u'pairs',
            u'set',
            u'str',
            u'seq',
            u'map',
        ):
            return Parser.transform_tag(self, handle, suffix)
        return handle + suffix
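
# Illustration: why RoundTripParser overrides transform_tag(). For the known
# core-schema suffixes listed above the behaviour matches Parser, e.g.
#
#     RoundTripParser.transform_tag(rtp, '!!', u'str')
#         -> u'tag:yaml.org,2002:str'
#
# but an unrecognized secondary-handle tag stays textually unmangled,
#
#     RoundTripParser.transform_tag(rtp, '!!', u'python/name:str')
#         -> u'!!python/name:str'
#
# so a later round-trip dump can reproduce exactly what was read (here `rtp`
# stands for a constructed RoundTripParser instance, an assumed name).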