# ext/_yaml.pyx  (pyyaml, python3 branch)
# Make compose() and load() ensure that the input stream contains a single document.

import yaml

def get_version_string():
    return yaml_get_version_string()

def get_version():
    cdef int major, minor, patch
    yaml_get_version(&major, &minor, &patch)
    return (major, minor, patch)

#Mark = yaml.error.Mark
YAMLError = yaml.error.YAMLError
ReaderError = yaml.reader.ReaderError
ScannerError = yaml.scanner.ScannerError
ParserError = yaml.parser.ParserError
ComposerError = yaml.composer.ComposerError
ConstructorError = yaml.constructor.ConstructorError
EmitterError = yaml.emitter.EmitterError
SerializerError = yaml.serializer.SerializerError
RepresenterError = yaml.representer.RepresenterError

StreamStartToken = yaml.tokens.StreamStartToken
StreamEndToken = yaml.tokens.StreamEndToken
DirectiveToken = yaml.tokens.DirectiveToken
DocumentStartToken = yaml.tokens.DocumentStartToken
DocumentEndToken = yaml.tokens.DocumentEndToken
BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken
BlockMappingStartToken = yaml.tokens.BlockMappingStartToken
BlockEndToken = yaml.tokens.BlockEndToken
FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken
FlowMappingStartToken = yaml.tokens.FlowMappingStartToken
FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken
FlowMappingEndToken = yaml.tokens.FlowMappingEndToken
KeyToken = yaml.tokens.KeyToken
ValueToken = yaml.tokens.ValueToken
BlockEntryToken = yaml.tokens.BlockEntryToken
FlowEntryToken = yaml.tokens.FlowEntryToken
AliasToken = yaml.tokens.AliasToken
AnchorToken = yaml.tokens.AnchorToken
TagToken = yaml.tokens.TagToken
ScalarToken = yaml.tokens.ScalarToken

StreamStartEvent = yaml.events.StreamStartEvent
StreamEndEvent = yaml.events.StreamEndEvent
DocumentStartEvent = yaml.events.DocumentStartEvent
DocumentEndEvent = yaml.events.DocumentEndEvent
AliasEvent = yaml.events.AliasEvent
ScalarEvent = yaml.events.ScalarEvent
SequenceStartEvent = yaml.events.SequenceStartEvent
SequenceEndEvent = yaml.events.SequenceEndEvent
MappingStartEvent = yaml.events.MappingStartEvent
MappingEndEvent = yaml.events.MappingEndEvent

ScalarNode = yaml.nodes.ScalarNode
SequenceNode = yaml.nodes.SequenceNode
MappingNode = yaml.nodes.MappingNode
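
# The names above are re-exported from the pure-Python package so that the
# C-accelerated scanner/parser/composer and emitter/serializer produce and
# consume exactly the same token, event and node objects, and raise the same
# error types, as the pure-Python implementation.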

cdef class Mark:
    cdef readonly object name
    cdef readonly int index
    cdef readonly int line
    cdef readonly int column
    cdef readonly buffer
    cdef readonly pointer

    def __init__(self, object name, int index, int line, int column,
            object buffer, object pointer):
        self.name = name
        self.index = index
        self.line = line
        self.column = column
        self.buffer = buffer
        self.pointer = pointer

    def get_snippet(self):
        return None

    def __str__(self):
        where = " in \"%s\", line %d, column %d" \
                % (self.name, self.line+1, self.column+1)
        return where
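
# NOTE: unlike yaml.error.Mark, this Mark cannot show a snippet of the
# offending line: the C parser does not keep the input buffer around, so
# buffer and pointer are always passed as None and get_snippet() returns None.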

#class YAMLError(Exception):
#    pass

#class MarkedYAMLError(YAMLError):

#    def __init__(self, context=None, context_mark=None,
#            problem=None, problem_mark=None, note=None):
#        self.context = context
#        self.context_mark = context_mark
#        self.problem = problem
#        self.problem_mark = problem_mark
#        self.note = note

#    def __str__(self):
#        lines = []
#        if self.context is not None:
#            lines.append(self.context)
#        if self.context_mark is not None \
#                and (self.problem is None or self.problem_mark is None
#                    or self.context_mark.name != self.problem_mark.name
#                    or self.context_mark.line != self.problem_mark.line
#                    or self.context_mark.column != self.problem_mark.column):
#            lines.append(str(self.context_mark))
#        if self.problem is not None:
#            lines.append(self.problem)
#        if self.problem_mark is not None:
#            lines.append(str(self.problem_mark))
#        if self.note is not None:
#            lines.append(self.note)
#        return '\n'.join(lines)

#class ReaderError(YAMLError):

#    def __init__(self, name, position, character, encoding, reason):
#        self.name = name
#        self.character = character
#        self.position = position
#        self.encoding = encoding
#        self.reason = reason

#    def __str__(self):
#        if isinstance(self.character, str):
#            return "'%s' codec can't decode byte #x%02x: %s\n" \
#                    " in \"%s\", position %d" \
#                    % (self.encoding, ord(self.character), self.reason,
#                        self.name, self.position)
#        else:
#            return "unacceptable character #x%04x: %s\n" \
#                    " in \"%s\", position %d" \
#                    % (ord(self.character), self.reason,
#                        self.name, self.position)

#class ScannerError(MarkedYAMLError):
#    pass

#class ParserError(MarkedYAMLError):
#    pass

#class EmitterError(YAMLError):
#    pass

#cdef class Token:
#    cdef readonly Mark start_mark
#    cdef readonly Mark end_mark
#    def __init__(self, Mark start_mark, Mark end_mark):
#        self.start_mark = start_mark
#        self.end_mark = end_mark

#cdef class StreamStartToken(Token):
#    cdef readonly object encoding
#    def __init__(self, Mark start_mark, Mark end_mark, encoding):
#        self.start_mark = start_mark
#        self.end_mark = end_mark
#        self.encoding = encoding

#cdef class StreamEndToken(Token):
#    pass

#cdef class DirectiveToken(Token):
#    cdef readonly object name
#    cdef readonly object value
#    def __init__(self, name, value, Mark start_mark, Mark end_mark):
#        self.name = name
#        self.value = value
#        self.start_mark = start_mark
#        self.end_mark = end_mark

#cdef class DocumentStartToken(Token):
#    pass

#cdef class DocumentEndToken(Token):
#    pass

#cdef class BlockSequenceStartToken(Token):
#    pass

#cdef class BlockMappingStartToken(Token):
#    pass

#cdef class BlockEndToken(Token):
#    pass

#cdef class FlowSequenceStartToken(Token):
#    pass

#cdef class FlowMappingStartToken(Token):
#    pass

#cdef class FlowSequenceEndToken(Token):
#    pass

#cdef class FlowMappingEndToken(Token):
#    pass

#cdef class KeyToken(Token):
#    pass

#cdef class ValueToken(Token):
#    pass

#cdef class BlockEntryToken(Token):
#    pass

#cdef class FlowEntryToken(Token):
#    pass

#cdef class AliasToken(Token):
#    cdef readonly object value
#    def __init__(self, value, Mark start_mark, Mark end_mark):
#        self.value = value
#        self.start_mark = start_mark
#        self.end_mark = end_mark

#cdef class AnchorToken(Token):
#    cdef readonly object value
#    def __init__(self, value, Mark start_mark, Mark end_mark):
#        self.value = value
#        self.start_mark = start_mark
#        self.end_mark = end_mark

#cdef class TagToken(Token):
#    cdef readonly object value
#    def __init__(self, value, Mark start_mark, Mark end_mark):
#        self.value = value
#        self.start_mark = start_mark
#        self.end_mark = end_mark

#cdef class ScalarToken(Token):
#    cdef readonly object value
#    cdef readonly object plain
#    cdef readonly object style
#    def __init__(self, value, plain, Mark start_mark, Mark end_mark, style=None):
#        self.value = value
#        self.plain = plain
#        self.start_mark = start_mark
#        self.end_mark = end_mark
#        self.style = style

cdef class CParser:

    cdef yaml_parser_t parser
    cdef yaml_event_t parsed_event

    cdef object stream
    cdef object stream_name
    cdef object current_token
    cdef object current_event
    cdef object anchors

    def __init__(self, stream):
        if yaml_parser_initialize(&self.parser) == 0:
            raise MemoryError
        self.parsed_event.type = YAML_NO_EVENT
        if hasattr(stream, 'read'):
            self.stream = stream
            try:
                self.stream_name = stream.name
            except AttributeError:
                self.stream_name = '<file>'
            yaml_parser_set_input(&self.parser, input_handler, <void *>self)
        else:
            if PyUnicode_CheckExact(stream) != 0:
                stream = PyUnicode_AsUTF8String(stream)
                self.stream_name = '<unicode string>'
            else:
                self.stream_name = '<string>'
            if PyString_CheckExact(stream) == 0:
                raise TypeError("a string or stream input is required")
            self.stream = stream
            yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream))
        self.current_token = None
        self.current_event = None
        self.anchors = {}
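
    # The constructor accepts either a string (unicode input is encoded to
    # UTF-8 first) or a file-like object with a read() method; in the latter
    # case libyaml pulls data on demand through input_handler() defined at the
    # bottom of this file.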

    def __dealloc__(self):
        yaml_parser_delete(&self.parser)
        yaml_event_delete(&self.parsed_event)

    cdef object _parser_error(self):
        if self.parser.error == YAML_MEMORY_ERROR:
            return MemoryError
        elif self.parser.error == YAML_READER_ERROR:
            return ReaderError(self.stream_name, self.parser.problem_offset,
                    self.parser.problem_value, '?', self.parser.problem)
        elif self.parser.error == YAML_SCANNER_ERROR \
                or self.parser.error == YAML_PARSER_ERROR:
            context_mark = None
            problem_mark = None
            if self.parser.context != NULL:
                context_mark = Mark(self.stream_name,
                        self.parser.context_mark.index,
                        self.parser.context_mark.line,
                        self.parser.context_mark.column, None, None)
            if self.parser.problem != NULL:
                problem_mark = Mark(self.stream_name,
                        self.parser.problem_mark.index,
                        self.parser.problem_mark.line,
                        self.parser.problem_mark.column, None, None)
            if self.parser.error == YAML_SCANNER_ERROR:
                if self.parser.context != NULL:
                    return ScannerError(self.parser.context, context_mark,
                            self.parser.problem, problem_mark)
                else:
                    return ScannerError(None, None,
                            self.parser.problem, problem_mark)
            else:
                if self.parser.context != NULL:
                    return ParserError(self.parser.context, context_mark,
                            self.parser.problem, problem_mark)
                else:
                    return ParserError(None, None,
                            self.parser.problem, problem_mark)
        raise ValueError("no parser error")

    def raw_scan(self):
        cdef yaml_token_t token
        cdef int done
        cdef int count
        count = 0
        done = 0
        while done == 0:
            if yaml_parser_scan(&self.parser, &token) == 0:
                error = self._parser_error()
                raise error
            if token.type == YAML_NO_TOKEN:
                done = 1
            else:
                count = count+1
            yaml_token_delete(&token)
        return count
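
    # raw_scan() drives the C scanner to the end of the stream and only counts
    # tokens; no Python token objects are built, which makes it useful for
    # timing the scanner without Python object overhead.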

    cdef object _scan(self):
        cdef yaml_token_t token
        if yaml_parser_scan(&self.parser, &token) == 0:
            error = self._parser_error()
            raise error
        token_object = self._token_to_object(&token)
        yaml_token_delete(&token)
        return token_object

    cdef object _token_to_object(self, yaml_token_t *token):
        start_mark = Mark(self.stream_name,
                token.start_mark.index,
                token.start_mark.line,
                token.start_mark.column,
                None, None)
        end_mark = Mark(self.stream_name,
                token.end_mark.index,
                token.end_mark.line,
                token.end_mark.column,
                None, None)
        if token.type == YAML_NO_TOKEN:
            return None
        elif token.type == YAML_STREAM_START_TOKEN:
            encoding = None
            if token.data.stream_start.encoding == YAML_UTF8_ENCODING:
                encoding = "utf-8"
            elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
                encoding = "utf-16-le"
            elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
                encoding = "utf-16-be"
            return StreamStartToken(start_mark, end_mark, encoding)
        elif token.type == YAML_STREAM_END_TOKEN:
            return StreamEndToken(start_mark, end_mark)
        elif token.type == YAML_VERSION_DIRECTIVE_TOKEN:
            return DirectiveToken("YAML",
                    (token.data.version_directive.major,
                        token.data.version_directive.minor),
                    start_mark, end_mark)
        elif token.type == YAML_TAG_DIRECTIVE_TOKEN:
            return DirectiveToken("TAG",
                    (token.data.tag_directive.handle,
                        token.data.tag_directive.prefix),
                    start_mark, end_mark)
        elif token.type == YAML_DOCUMENT_START_TOKEN:
            return DocumentStartToken(start_mark, end_mark)
        elif token.type == YAML_DOCUMENT_END_TOKEN:
            return DocumentEndToken(start_mark, end_mark)
        elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN:
            return BlockSequenceStartToken(start_mark, end_mark)
        elif token.type == YAML_BLOCK_MAPPING_START_TOKEN:
            return BlockMappingStartToken(start_mark, end_mark)
        elif token.type == YAML_BLOCK_END_TOKEN:
            return BlockEndToken(start_mark, end_mark)
        elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN:
            return FlowSequenceStartToken(start_mark, end_mark)
        elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN:
            return FlowSequenceEndToken(start_mark, end_mark)
        elif token.type == YAML_FLOW_MAPPING_START_TOKEN:
            return FlowMappingStartToken(start_mark, end_mark)
        elif token.type == YAML_FLOW_MAPPING_END_TOKEN:
            return FlowMappingEndToken(start_mark, end_mark)
        elif token.type == YAML_BLOCK_ENTRY_TOKEN:
            return BlockEntryToken(start_mark, end_mark)
        elif token.type == YAML_FLOW_ENTRY_TOKEN:
            return FlowEntryToken(start_mark, end_mark)
        elif token.type == YAML_KEY_TOKEN:
            return KeyToken(start_mark, end_mark)
        elif token.type == YAML_VALUE_TOKEN:
            return ValueToken(start_mark, end_mark)
        elif token.type == YAML_ALIAS_TOKEN:
            value = PyUnicode_DecodeUTF8(token.data.alias.value,
                    strlen(token.data.alias.value), 'strict')
            return AliasToken(value, start_mark, end_mark)
        elif token.type == YAML_ANCHOR_TOKEN:
            value = PyUnicode_DecodeUTF8(token.data.anchor.value,
                    strlen(token.data.anchor.value), 'strict')
            return AnchorToken(value, start_mark, end_mark)
        elif token.type == YAML_TAG_TOKEN:
            handle = PyUnicode_DecodeUTF8(token.data.tag.handle,
                    strlen(token.data.tag.handle), 'strict')
            suffix = PyUnicode_DecodeUTF8(token.data.tag.suffix,
                    strlen(token.data.tag.suffix), 'strict')
            if not handle:
                handle = None
            return TagToken((handle, suffix), start_mark, end_mark)
        elif token.type == YAML_SCALAR_TOKEN:
            value = PyUnicode_DecodeUTF8(token.data.scalar.value,
                    token.data.scalar.length, 'strict')
            plain = False
            style = None
            if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
                plain = True
                style = ''
            elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
                style = '\''
            elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
                style = '"'
            elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
                style = '|'
            elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
                style = '>'
            return ScalarToken(value, plain,
                    start_mark, end_mark, style)
        else:
            raise ValueError("unknown token type")

    def get_token(self):
        if self.current_token is not None:
            value = self.current_token
            self.current_token = None
        else:
            value = self._scan()
        return value

    def peek_token(self):
        if self.current_token is None:
            self.current_token = self._scan()
        return self.current_token

    def check_token(self, *choices):
        if self.current_token is None:
            self.current_token = self._scan()
        if self.current_token is None:
            return False
        if not choices:
            return True
        token_class = self.current_token.__class__
        for choice in choices:
            if token_class is choice:
                return True
        return False
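
    # get_token()/peek_token()/check_token() mirror the interface of the pure
    # Python yaml.scanner.Scanner, so a Loader built on CParser can feed the
    # same downstream code.  A minimal sketch of driving the scanner directly
    # (a usage assumption, not code from this module):
    #
    #     parser = CParser("- a\n- b\n")
    #     while not parser.check_token(StreamEndToken):
    #         print(parser.get_token())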

    def raw_parse(self):
        cdef yaml_event_t event
        cdef int done
        cdef int count
        count = 0
        done = 0
        while done == 0:
            if yaml_parser_parse(&self.parser, &event) == 0:
                error = self._parser_error()
                raise error
            if event.type == YAML_NO_EVENT:
                done = 1
            else:
                count = count+1
            yaml_event_delete(&event)
        return count

    cdef object _parse(self):
        cdef yaml_event_t event
        if yaml_parser_parse(&self.parser, &event) == 0:
            error = self._parser_error()
            raise error
        event_object = self._event_to_object(&event)
        yaml_event_delete(&event)
        return event_object

    cdef object _event_to_object(self, yaml_event_t *event):
        cdef yaml_tag_directive_t *tag_directive
        start_mark = Mark(self.stream_name,
                event.start_mark.index,
                event.start_mark.line,
                event.start_mark.column,
                None, None)
        end_mark = Mark(self.stream_name,
                event.end_mark.index,
                event.end_mark.line,
                event.end_mark.column,
                None, None)
        if event.type == YAML_NO_EVENT:
            return None
        elif event.type == YAML_STREAM_START_EVENT:
            encoding = None
            if event.data.stream_start.encoding == YAML_UTF8_ENCODING:
                encoding = "utf-8"
            elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING:
                encoding = "utf-16-le"
            elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING:
                encoding = "utf-16-be"
            return StreamStartEvent(start_mark, end_mark, encoding)
        elif event.type == YAML_STREAM_END_EVENT:
            return StreamEndEvent(start_mark, end_mark)
        elif event.type == YAML_DOCUMENT_START_EVENT:
            explicit = False
            if event.data.document_start.implicit == 0:
                explicit = True
            version = None
            if event.data.document_start.version_directive != NULL:
                version = (event.data.document_start.version_directive.major,
                        event.data.document_start.version_directive.minor)
            tags = None
            if event.data.document_start.tag_directives.start != NULL:
                tags = {}
                tag_directive = event.data.document_start.tag_directives.start
                while tag_directive != event.data.document_start.tag_directives.end:
                    handle = PyUnicode_DecodeUTF8(tag_directive.handle,
                            strlen(tag_directive.handle), 'strict')
                    prefix = PyUnicode_DecodeUTF8(tag_directive.prefix,
                            strlen(tag_directive.prefix), 'strict')
                    tags[handle] = prefix
                    tag_directive = tag_directive+1
            return DocumentStartEvent(start_mark, end_mark,
                    explicit, version, tags)
        elif event.type == YAML_DOCUMENT_END_EVENT:
            explicit = False
            if event.data.document_end.implicit == 0:
                explicit = True
            return DocumentEndEvent(start_mark, end_mark, explicit)
        elif event.type == YAML_ALIAS_EVENT:
            anchor = PyUnicode_DecodeUTF8(event.data.alias.anchor,
                    strlen(event.data.alias.anchor), 'strict')
            return AliasEvent(anchor, start_mark, end_mark)
        elif event.type == YAML_SCALAR_EVENT:
            anchor = None
            if event.data.scalar.anchor != NULL:
                anchor = PyUnicode_DecodeUTF8(event.data.scalar.anchor,
                        strlen(event.data.scalar.anchor), 'strict')
            tag = None
            if event.data.scalar.tag != NULL:
                tag = PyUnicode_DecodeUTF8(event.data.scalar.tag,
                        strlen(event.data.scalar.tag), 'strict')
            value = PyUnicode_DecodeUTF8(event.data.scalar.value,
                    event.data.scalar.length, 'strict')
            plain_implicit = False
            if event.data.scalar.plain_implicit == 1:
                plain_implicit = True
            quoted_implicit = False
            if event.data.scalar.quoted_implicit == 1:
                quoted_implicit = True
            style = None
            if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
                style = ''
            elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
                style = '\''
            elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
                style = '"'
            elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
                style = '|'
            elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
                style = '>'
            return ScalarEvent(anchor, tag,
                    (plain_implicit, quoted_implicit),
                    value, start_mark, end_mark, style)
        elif event.type == YAML_SEQUENCE_START_EVENT:
            anchor = None
            if event.data.sequence_start.anchor != NULL:
                anchor = PyUnicode_DecodeUTF8(event.data.sequence_start.anchor,
                        strlen(event.data.sequence_start.anchor), 'strict')
            tag = None
            if event.data.sequence_start.tag != NULL:
                tag = PyUnicode_DecodeUTF8(event.data.sequence_start.tag,
                        strlen(event.data.sequence_start.tag), 'strict')
            implicit = False
            if event.data.sequence_start.implicit == 1:
                implicit = True
            flow_style = None
            if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
                flow_style = True
            elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
                flow_style = False
            return SequenceStartEvent(anchor, tag, implicit,
                    start_mark, end_mark, flow_style)
        elif event.type == YAML_MAPPING_START_EVENT:
            anchor = None
            if event.data.mapping_start.anchor != NULL:
                anchor = PyUnicode_DecodeUTF8(event.data.mapping_start.anchor,
                        strlen(event.data.mapping_start.anchor), 'strict')
            tag = None
            if event.data.mapping_start.tag != NULL:
                tag = PyUnicode_DecodeUTF8(event.data.mapping_start.tag,
                        strlen(event.data.mapping_start.tag), 'strict')
            implicit = False
            if event.data.mapping_start.implicit == 1:
                implicit = True
            flow_style = None
            if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
                flow_style = True
            elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
                flow_style = False
            return MappingStartEvent(anchor, tag, implicit,
                    start_mark, end_mark, flow_style)
        elif event.type == YAML_SEQUENCE_END_EVENT:
            return SequenceEndEvent(start_mark, end_mark)
        elif event.type == YAML_MAPPING_END_EVENT:
            return MappingEndEvent(start_mark, end_mark)
        else:
            raise ValueError("unknown event type")

    def get_event(self):
        if self.current_event is not None:
            value = self.current_event
            self.current_event = None
        else:
            value = self._parse()
        return value

    def peek_event(self):
        if self.current_event is None:
            self.current_event = self._parse()
        return self.current_event

    def check_event(self, *choices):
        if self.current_event is None:
            self.current_event = self._parse()
        if self.current_event is None:
            return False
        if not choices:
            return True
        event_class = self.current_event.__class__
        for choice in choices:
            if event_class is choice:
                return True
        return False
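
    # get_event()/peek_event()/check_event() stand in for yaml.parser.Parser
    # in the C-accelerated Loader classes, while check_node()/get_node()/
    # get_single_node() below stand in for yaml.composer.Composer.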

    def check_node(self):
        self._parse_next_event()
        if self.parsed_event.type == YAML_STREAM_START_EVENT:
            yaml_event_delete(&self.parsed_event)
            self._parse_next_event()
        if self.parsed_event.type != YAML_STREAM_END_EVENT:
            return True
        return False

    def get_node(self):
        self._parse_next_event()
        if self.parsed_event.type != YAML_STREAM_END_EVENT:
            return self._compose_document()

    def get_single_node(self):
        self._parse_next_event()
        yaml_event_delete(&self.parsed_event)
        self._parse_next_event()
        document = None
        if self.parsed_event.type != YAML_STREAM_END_EVENT:
            document = self._compose_document()
        self._parse_next_event()
        if self.parsed_event.type != YAML_STREAM_END_EVENT:
            mark = Mark(self.stream_name,
                    self.parsed_event.start_mark.index,
                    self.parsed_event.start_mark.line,
                    self.parsed_event.start_mark.column,
                    None, None)
            raise ComposerError("expected a single document in the stream",
                    document.start_mark, "but found another document", mark)
        return document
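
    # get_single_node() backs compose() and load(): after composing the first
    # document it checks that the next event is the end of the stream, and
    # otherwise raises a ComposerError pointing at the start of the second
    # document, matching the behaviour of the pure-Python Composer.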

    cdef object _compose_document(self):
        yaml_event_delete(&self.parsed_event)
        node = self._compose_node(None, None)
        self._parse_next_event()
        yaml_event_delete(&self.parsed_event)
        self.anchors = {}
        return node

    cdef object _compose_node(self, object parent, object index):
        self._parse_next_event()
        if self.parsed_event.type == YAML_ALIAS_EVENT:
            anchor = PyUnicode_DecodeUTF8(self.parsed_event.data.alias.anchor,
                    strlen(self.parsed_event.data.alias.anchor), 'strict')
            if anchor not in self.anchors:
                mark = Mark(self.stream_name,
                        self.parsed_event.start_mark.index,
                        self.parsed_event.start_mark.line,
                        self.parsed_event.start_mark.column,
                        None, None)
                raise ComposerError(None, None, "found undefined alias", mark)
            yaml_event_delete(&self.parsed_event)
            return self.anchors[anchor]
        anchor = None
        if self.parsed_event.type == YAML_SCALAR_EVENT \
                and self.parsed_event.data.scalar.anchor != NULL:
            anchor = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.anchor,
                    strlen(self.parsed_event.data.scalar.anchor), 'strict')
        elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \
                and self.parsed_event.data.sequence_start.anchor != NULL:
            anchor = PyUnicode_DecodeUTF8(self.parsed_event.data.sequence_start.anchor,
                    strlen(self.parsed_event.data.sequence_start.anchor), 'strict')
        elif self.parsed_event.type == YAML_MAPPING_START_EVENT \
                and self.parsed_event.data.mapping_start.anchor != NULL:
            anchor = PyUnicode_DecodeUTF8(self.parsed_event.data.mapping_start.anchor,
                    strlen(self.parsed_event.data.mapping_start.anchor), 'strict')
        if anchor is not None:
            if anchor in self.anchors:
                mark = Mark(self.stream_name,
                        self.parsed_event.start_mark.index,
                        self.parsed_event.start_mark.line,
                        self.parsed_event.start_mark.column,
                        None, None)
                raise ComposerError("found duplicate anchor; first occurrence",
                        self.anchors[anchor].start_mark, "second occurrence", mark)
        self.descend_resolver(parent, index)
        if self.parsed_event.type == YAML_SCALAR_EVENT:
            node = self._compose_scalar_node(anchor)
        elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT:
            node = self._compose_sequence_node(anchor)
        elif self.parsed_event.type == YAML_MAPPING_START_EVENT:
            node = self._compose_mapping_node(anchor)
        self.ascend_resolver()
        return node
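
    # descend_resolver(), ascend_resolver() and resolve() are not defined in
    # this class: they are supplied by the Resolver mix-in when CParser is
    # combined into a Loader class, which keeps a stack of the nodes being
    # composed so that path-based tag resolution works.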

    cdef _compose_scalar_node(self, object anchor):
        start_mark = Mark(self.stream_name,
                self.parsed_event.start_mark.index,
                self.parsed_event.start_mark.line,
                self.parsed_event.start_mark.column,
                None, None)
        end_mark = Mark(self.stream_name,
                self.parsed_event.end_mark.index,
                self.parsed_event.end_mark.line,
                self.parsed_event.end_mark.column,
                None, None)
        value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value,
                self.parsed_event.data.scalar.length, 'strict')
        plain_implicit = False
        if self.parsed_event.data.scalar.plain_implicit == 1:
            plain_implicit = True
        quoted_implicit = False
        if self.parsed_event.data.scalar.quoted_implicit == 1:
            quoted_implicit = True
        if self.parsed_event.data.scalar.tag == NULL \
                or (self.parsed_event.data.scalar.tag[0] == c'!'
                    and self.parsed_event.data.scalar.tag[1] == c'\0'):
            tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit))
        else:
            tag = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.tag,
                    strlen(self.parsed_event.data.scalar.tag), 'strict')
        style = None
        if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE:
            style = ''
        elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE:
            style = '\''
        elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE:
            style = '"'
        elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE:
            style = '|'
        elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE:
            style = '>'
        node = ScalarNode(tag, value, start_mark, end_mark, style)
        if anchor is not None:
            self.anchors[anchor] = node
        yaml_event_delete(&self.parsed_event)
        return node

    cdef _compose_sequence_node(self, object anchor):
        cdef int index
        start_mark = Mark(self.stream_name,
                self.parsed_event.start_mark.index,
                self.parsed_event.start_mark.line,
                self.parsed_event.start_mark.column,
                None, None)
        implicit = False
        if self.parsed_event.data.sequence_start.implicit == 1:
            implicit = True
        if self.parsed_event.data.sequence_start.tag == NULL \
                or (self.parsed_event.data.sequence_start.tag[0] == c'!'
                    and self.parsed_event.data.sequence_start.tag[1] == c'\0'):
            tag = self.resolve(SequenceNode, None, implicit)
        else:
            tag = PyUnicode_DecodeUTF8(self.parsed_event.data.sequence_start.tag,
                    strlen(self.parsed_event.data.sequence_start.tag), 'strict')
        flow_style = None
        if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE:
            flow_style = True
        elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE:
            flow_style = False
        value = []
        node = SequenceNode(tag, value, start_mark, None, flow_style)
        if anchor is not None:
            self.anchors[anchor] = node
        yaml_event_delete(&self.parsed_event)
        index = 0
        self._parse_next_event()
        while self.parsed_event.type != YAML_SEQUENCE_END_EVENT:
            value.append(self._compose_node(node, index))
            index = index+1
            self._parse_next_event()
        node.end_mark = Mark(self.stream_name,
                self.parsed_event.end_mark.index,
                self.parsed_event.end_mark.line,
                self.parsed_event.end_mark.column,
                None, None)
        yaml_event_delete(&self.parsed_event)
        return node

    cdef _compose_mapping_node(self, object anchor):
        start_mark = Mark(self.stream_name,
                self.parsed_event.start_mark.index,
                self.parsed_event.start_mark.line,
                self.parsed_event.start_mark.column,
                None, None)
        implicit = False
        if self.parsed_event.data.mapping_start.implicit == 1:
            implicit = True
        if self.parsed_event.data.mapping_start.tag == NULL \
                or (self.parsed_event.data.mapping_start.tag[0] == c'!'
                    and self.parsed_event.data.mapping_start.tag[1] == c'\0'):
            tag = self.resolve(MappingNode, None, implicit)
        else:
            tag = PyUnicode_DecodeUTF8(self.parsed_event.data.mapping_start.tag,
                    strlen(self.parsed_event.data.mapping_start.tag), 'strict')
        flow_style = None
        if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE:
            flow_style = True
        elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE:
            flow_style = False
        value = []
        node = MappingNode(tag, value, start_mark, None, flow_style)
        if anchor is not None:
            self.anchors[anchor] = node
        yaml_event_delete(&self.parsed_event)
        self._parse_next_event()
        while self.parsed_event.type != YAML_MAPPING_END_EVENT:
            item_key = self._compose_node(node, None)
            item_value = self._compose_node(node, item_key)
            value.append((item_key, item_value))
            self._parse_next_event()
        node.end_mark = Mark(self.stream_name,
                self.parsed_event.end_mark.index,
                self.parsed_event.end_mark.line,
                self.parsed_event.end_mark.column,
                None, None)
        yaml_event_delete(&self.parsed_event)
        return node

    cdef int _parse_next_event(self) except 0:
        if self.parsed_event.type == YAML_NO_EVENT:
            if yaml_parser_parse(&self.parser, &self.parsed_event) == 0:
                error = self._parser_error()
                raise error
        return 1
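
# input_handler() is the read callback registered with
# yaml_parser_set_input(): libyaml calls it whenever it needs more input, and
# it fills the C buffer from the Python stream's read() method.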

cdef int input_handler(void *data, char *buffer, int size, int *read) except 0:
    cdef CParser parser
    parser = <CParser>data
    value = parser.stream.read(size)
    if PyString_CheckExact(value) == 0:
        raise TypeError("a string value is expected")
    if PyString_GET_SIZE(value) > size:
        raise ValueError("a string value is too long")
    memcpy(buffer, PyString_AS_STRING(value), PyString_GET_SIZE(value))
    read[0] = PyString_GET_SIZE(value)
    return 1
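
# CEmitter wraps yaml_emitter_t and provides the Emitter/Serializer half of
# the pipeline: emit() accepts event objects directly, while open() and
# close() bracket the YAML stream and serialize() writes one composed node
# tree per document, mirroring yaml.emitter.Emitter and
# yaml.serializer.Serializer.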

cdef class CEmitter:

    cdef yaml_emitter_t emitter

    cdef object stream

    cdef yaml_encoding_t use_encoding
    cdef int document_start_implicit
    cdef int document_end_implicit
    cdef object use_version
    cdef object use_tags

    cdef object serialized_nodes
    cdef object anchors
    cdef int last_alias_id
    cdef int closed

    def __init__(self, stream, canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None, encoding=None,
            explicit_start=None, explicit_end=None, version=None, tags=None):
        if yaml_emitter_initialize(&self.emitter) == 0:
            raise MemoryError
        self.stream = stream
        yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
        if canonical is not None:
            yaml_emitter_set_canonical(&self.emitter, 1)
        if indent is not None:
            yaml_emitter_set_indent(&self.emitter, indent)
        if width is not None:
            yaml_emitter_set_width(&self.emitter, width)
        if allow_unicode is not None:
            yaml_emitter_set_unicode(&self.emitter, 1)
        if line_break is not None:
            if line_break == '\r':
                yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK)
            elif line_break == '\n':
                yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK)
            elif line_break == '\r\n':
                yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK)
        if encoding == 'utf-16-le':
            self.use_encoding = YAML_UTF16LE_ENCODING
        elif encoding == 'utf-16-be':
            self.use_encoding = YAML_UTF16BE_ENCODING
        else:
            self.use_encoding = YAML_UTF8_ENCODING
        self.document_start_implicit = 1
        if explicit_start:
            self.document_start_implicit = 0
        self.document_end_implicit = 1
        if explicit_end:
            self.document_end_implicit = 0
        self.use_version = version
        self.use_tags = tags
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_alias_id = 0
        self.closed = -1
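
    # `closed` is a small state machine shared with the pure-Python
    # Serializer: -1 means open() has not been called yet, 0 means the stream
    # is open and documents may be serialized, 1 means close() has been called.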

    def __dealloc__(self):
        yaml_emitter_delete(&self.emitter)

    cdef object _emitter_error(self):
        if self.emitter.error == YAML_MEMORY_ERROR:
            return MemoryError
        elif self.emitter.error == YAML_EMITTER_ERROR:
            return EmitterError(self.emitter.problem)
        raise ValueError("no emitter error")

    cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0:
        cdef yaml_encoding_t encoding
        cdef yaml_version_directive_t version_directive_value
        cdef yaml_version_directive_t *version_directive
        cdef yaml_tag_directive_t tag_directives_value[128]
        cdef yaml_tag_directive_t *tag_directives_start
        cdef yaml_tag_directive_t *tag_directives_end
        cdef int implicit
        cdef int plain_implicit
        cdef int quoted_implicit
        cdef char *anchor
        cdef char *tag
        cdef char *value
        cdef int length
        cdef yaml_scalar_style_t scalar_style
        cdef yaml_sequence_style_t sequence_style
        cdef yaml_mapping_style_t mapping_style
        event_class = event_object.__class__
        if event_class is StreamStartEvent:
            encoding = YAML_UTF8_ENCODING
            if event_object.encoding == 'utf-16-le':
                encoding = YAML_UTF16LE_ENCODING
            elif event_object.encoding == 'utf-16-be':
                encoding = YAML_UTF16BE_ENCODING
            yaml_stream_start_event_initialize(event, encoding)
        elif event_class is StreamEndEvent:
            yaml_stream_end_event_initialize(event)
        elif event_class is DocumentStartEvent:
            version_directive = NULL
            if event_object.version:
                version_directive_value.major = event_object.version[0]
                version_directive_value.minor = event_object.version[1]
                version_directive = &version_directive_value
            tag_directives_start = NULL
            tag_directives_end = NULL
            if event_object.tags:
                if len(event_object.tags) > 128:
                    raise ValueError("too many tags")
                tag_directives_start = tag_directives_value
                tag_directives_end = tag_directives_value
                cache = []
                for handle in event_object.tags:
                    prefix = event_object.tags[handle]
                    if PyUnicode_CheckExact(handle):
                        handle = PyUnicode_AsUTF8String(handle)
                        cache.append(handle)
                    if not PyString_CheckExact(handle):
                        raise TypeError("tag handle must be a string")
                    tag_directives_end.handle = PyString_AS_STRING(handle)
                    if PyUnicode_CheckExact(prefix):
                        prefix = PyUnicode_AsUTF8String(prefix)
                        cache.append(prefix)
                    if not PyString_CheckExact(prefix):
                        raise TypeError("tag prefix must be a string")
                    tag_directives_end.prefix = PyString_AS_STRING(prefix)
                    tag_directives_end = tag_directives_end+1
            implicit = 1
            if event_object.explicit:
                implicit = 0
            if yaml_document_start_event_initialize(event, version_directive,
                    tag_directives_start, tag_directives_end, implicit) == 0:
                raise MemoryError
        elif event_class is DocumentEndEvent:
            implicit = 1
            if event_object.explicit:
                implicit = 0
            yaml_document_end_event_initialize(event, implicit)
        elif event_class is AliasEvent:
            anchor = NULL
            anchor_object = event_object.anchor
            if PyUnicode_CheckExact(anchor_object):
                anchor_object = PyUnicode_AsUTF8String(anchor_object)
            if not PyString_CheckExact(anchor_object):
                raise TypeError("anchor must be a string")
            anchor = PyString_AS_STRING(anchor_object)
            if yaml_alias_event_initialize(event, anchor) == 0:
                raise MemoryError
        elif event_class is ScalarEvent:
            anchor = NULL
            anchor_object = event_object.anchor
            if anchor_object is not None:
                if PyUnicode_CheckExact(anchor_object):
                    anchor_object = PyUnicode_AsUTF8String(anchor_object)
                if not PyString_CheckExact(anchor_object):
                    raise TypeError("anchor must be a string")
                anchor = PyString_AS_STRING(anchor_object)
            tag = NULL
            tag_object = event_object.tag
            if tag_object is not None:
                if PyUnicode_CheckExact(tag_object):
                    tag_object = PyUnicode_AsUTF8String(tag_object)
                if not PyString_CheckExact(tag_object):
                    raise TypeError("tag must be a string")
                tag = PyString_AS_STRING(tag_object)
            value_object = event_object.value
            if PyUnicode_CheckExact(value_object):
                value_object = PyUnicode_AsUTF8String(value_object)
            if not PyString_CheckExact(value_object):
                raise TypeError("value must be a string")
            value = PyString_AS_STRING(value_object)
            length = PyString_GET_SIZE(value_object)
            plain_implicit = 0
            quoted_implicit = 0
            if event_object.implicit is not None:
                plain_implicit = event_object.implicit[0]
                quoted_implicit = event_object.implicit[1]
            style_object = event_object.style
            scalar_style = YAML_PLAIN_SCALAR_STYLE
            if style_object == "'":
                scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
            elif style_object == "\"":
                scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
            elif style_object == "|":
                scalar_style = YAML_LITERAL_SCALAR_STYLE
            elif style_object == ">":
                scalar_style = YAML_FOLDED_SCALAR_STYLE
            if yaml_scalar_event_initialize(event, anchor, tag, value, length,
                    plain_implicit, quoted_implicit, scalar_style) == 0:
                raise MemoryError
        elif event_class is SequenceStartEvent:
            anchor = NULL
            anchor_object = event_object.anchor
            if anchor_object is not None:
                if PyUnicode_CheckExact(anchor_object):
                    anchor_object = PyUnicode_AsUTF8String(anchor_object)
                if not PyString_CheckExact(anchor_object):
                    raise TypeError("anchor must be a string")
                anchor = PyString_AS_STRING(anchor_object)
            tag = NULL
            tag_object = event_object.tag
            if tag_object is not None:
                if PyUnicode_CheckExact(tag_object):
                    tag_object = PyUnicode_AsUTF8String(tag_object)
                if not PyString_CheckExact(tag_object):
                    raise TypeError("tag must be a string")
                tag = PyString_AS_STRING(tag_object)
            implicit = 0
            if event_object.implicit:
                implicit = 1
            sequence_style = YAML_BLOCK_SEQUENCE_STYLE
            if event_object.flow_style:
                sequence_style = YAML_FLOW_SEQUENCE_STYLE
            if yaml_sequence_start_event_initialize(event, anchor, tag,
                    implicit, sequence_style) == 0:
                raise MemoryError
        elif event_class is MappingStartEvent:
            anchor = NULL
            anchor_object = event_object.anchor
            if anchor_object is not None:
                if PyUnicode_CheckExact(anchor_object):
                    anchor_object = PyUnicode_AsUTF8String(anchor_object)
                if not PyString_CheckExact(anchor_object):
                    raise TypeError("anchor must be a string")
                anchor = PyString_AS_STRING(anchor_object)
            tag = NULL
            tag_object = event_object.tag
            if tag_object is not None:
                if PyUnicode_CheckExact(tag_object):
                    tag_object = PyUnicode_AsUTF8String(tag_object)
                if not PyString_CheckExact(tag_object):
                    raise TypeError("tag must be a string")
                tag = PyString_AS_STRING(tag_object)
            implicit = 0
            if event_object.implicit:
                implicit = 1
            mapping_style = YAML_BLOCK_MAPPING_STYLE
            if event_object.flow_style:
                mapping_style = YAML_FLOW_MAPPING_STYLE
            if yaml_mapping_start_event_initialize(event, anchor, tag,
                    implicit, mapping_style) == 0:
                raise MemoryError
        elif event_class is SequenceEndEvent:
            yaml_sequence_end_event_initialize(event)
        elif event_class is MappingEndEvent:
            yaml_mapping_end_event_initialize(event)
        else:
            raise TypeError("invalid event %s" % event_object)
        return 1
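
    # The local `cache` list above holds the UTF-8 byte strings created for
    # tag handles and prefixes so that the char* pointers stored in
    # tag_directives_value remain valid until
    # yaml_document_start_event_initialize() copies them.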

    def emit(self, event_object):
        cdef yaml_event_t event
        self._object_to_event(event_object, &event)
        if yaml_emitter_emit(&self.emitter, &event) == 0:
            error = self._emitter_error()
            raise error

    def open(self):
        cdef yaml_event_t event
        if self.closed == -1:
            yaml_stream_start_event_initialize(&event, self.use_encoding)
            if yaml_emitter_emit(&self.emitter, &event) == 0:
                error = self._emitter_error()
                raise error
            self.closed = 0
        elif self.closed == 1:
            raise SerializerError("serializer is closed")
        else:
            raise SerializerError("serializer is already opened")

    def close(self):
        cdef yaml_event_t event
        if self.closed == -1:
            raise SerializerError("serializer is not opened")
        elif self.closed == 0:
            yaml_stream_end_event_initialize(&event)
            if yaml_emitter_emit(&self.emitter, &event) == 0:
                error = self._emitter_error()
                raise error
            self.closed = 1

    def serialize(self, node):
        cdef yaml_event_t event
        cdef yaml_version_directive_t version_directive_value
        cdef yaml_version_directive_t *version_directive
        cdef yaml_tag_directive_t tag_directives_value[128]
        cdef yaml_tag_directive_t *tag_directives_start
        cdef yaml_tag_directive_t *tag_directives_end
        if self.closed == -1:
            raise SerializerError("serializer is not opened")
        elif self.closed == 1:
            raise SerializerError("serializer is closed")
        cache = []
        version_directive = NULL
        if self.use_version:
            version_directive_value.major = self.use_version[0]
            version_directive_value.minor = self.use_version[1]
            version_directive = &version_directive_value
        tag_directives_start = NULL
        tag_directives_end = NULL
        if self.use_tags:
            if len(self.use_tags) > 128:
                raise ValueError("too many tags")
            tag_directives_start = tag_directives_value
            tag_directives_end = tag_directives_value
            for handle in self.use_tags:
                prefix = self.use_tags[handle]
                if PyUnicode_CheckExact(handle):
                    handle = PyUnicode_AsUTF8String(handle)
                    cache.append(handle)
                if not PyString_CheckExact(handle):
                    raise TypeError("tag handle must be a string")
                tag_directives_end.handle = PyString_AS_STRING(handle)
                if PyUnicode_CheckExact(prefix):
                    prefix = PyUnicode_AsUTF8String(prefix)
                    cache.append(prefix)
                if not PyString_CheckExact(prefix):
                    raise TypeError("tag prefix must be a string")
                tag_directives_end.prefix = PyString_AS_STRING(prefix)
                tag_directives_end = tag_directives_end+1
        if yaml_document_start_event_initialize(&event, version_directive,
                tag_directives_start, tag_directives_end,
                self.document_start_implicit) == 0:
            raise MemoryError
        if yaml_emitter_emit(&self.emitter, &event) == 0:
            error = self._emitter_error()
            raise error
        self._anchor_node(node)
        self._serialize_node(node, None, None)
        yaml_document_end_event_initialize(&event, self.document_end_implicit)
        if yaml_emitter_emit(&self.emitter, &event) == 0:
            error = self._emitter_error()
            raise error
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_alias_id = 0

    cdef int _anchor_node(self, object node) except 0:
        if node in self.anchors:
            if self.anchors[node] is None:
                self.last_alias_id = self.last_alias_id+1
                self.anchors[node] = "id%03d" % self.last_alias_id
        else:
            self.anchors[node] = None
            node_class = node.__class__
            if node_class is SequenceNode:
                for item in node.value:
                    self._anchor_node(item)
            elif node_class is MappingNode:
                for key, value in node.value:
                    self._anchor_node(key)
                    self._anchor_node(value)
        return 1
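
    # _anchor_node() is a pre-pass over the node graph: the first visit
    # records the node with a None anchor, and any later visit of the same
    # node upgrades it to a generated "idNNN" anchor so that
    # _serialize_node() can emit an alias instead of serializing it twice.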

    cdef int _serialize_node(self, object node, object parent, object index) except 0:
        cdef yaml_event_t event
        cdef int implicit
        cdef int plain_implicit
        cdef int quoted_implicit
        cdef char *anchor
        cdef char *tag
        cdef char *value
        cdef int length
        cdef int item_index
        cdef yaml_scalar_style_t scalar_style
        cdef yaml_sequence_style_t sequence_style
        cdef yaml_mapping_style_t mapping_style
        anchor_object = self.anchors[node]
        anchor = NULL
        if anchor_object is not None:
            anchor = PyString_AS_STRING(anchor_object)
        if node in self.serialized_nodes:
            if yaml_alias_event_initialize(&event, anchor) == 0:
                raise MemoryError
            if yaml_emitter_emit(&self.emitter, &event) == 0:
                error = self._emitter_error()
                raise error
        else:
            node_class = node.__class__
            self.serialized_nodes[node] = True
            self.descend_resolver(parent, index)
            if node_class is ScalarNode:
                plain_implicit = 0
                quoted_implicit = 0
                tag_object = node.tag
                if self.resolve(ScalarNode, node.value, (True, False)) == tag_object:
                    plain_implicit = 1
                if self.resolve(ScalarNode, node.value, (False, True)) == tag_object:
                    quoted_implicit = 1
                tag = NULL
                if tag_object is not None:
                    if PyUnicode_CheckExact(tag_object):
                        tag_object = PyUnicode_AsUTF8String(tag_object)
                    if not PyString_CheckExact(tag_object):
                        raise TypeError("tag must be a string")
                    tag = PyString_AS_STRING(tag_object)
                value_object = node.value
                if PyUnicode_CheckExact(value_object):
                    value_object = PyUnicode_AsUTF8String(value_object)
                if not PyString_CheckExact(value_object):
                    raise TypeError("value must be a string")
                value = PyString_AS_STRING(value_object)
                length = PyString_GET_SIZE(value_object)
                style_object = node.style
                scalar_style = YAML_PLAIN_SCALAR_STYLE
                if style_object == "'":
                    scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE
                elif style_object == "\"":
                    scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE
                elif style_object == "|":
                    scalar_style = YAML_LITERAL_SCALAR_STYLE
                elif style_object == ">":
                    scalar_style = YAML_FOLDED_SCALAR_STYLE
                if yaml_scalar_event_initialize(&event, anchor, tag, value, length,
                        plain_implicit, quoted_implicit, scalar_style) == 0:
                    raise MemoryError
                if yaml_emitter_emit(&self.emitter, &event) == 0:
                    error = self._emitter_error()
                    raise error
            elif node_class is SequenceNode:
                implicit = 0
                tag_object = node.tag
                if self.resolve(SequenceNode, node.value, True) == tag_object:
                    implicit = 1
                tag = NULL
                if tag_object is not None:
                    if PyUnicode_CheckExact(tag_object):
                        tag_object = PyUnicode_AsUTF8String(tag_object)
                    if not PyString_CheckExact(tag_object):
                        raise TypeError("tag must be a string")
                    tag = PyString_AS_STRING(tag_object)
                sequence_style = YAML_BLOCK_SEQUENCE_STYLE
                if node.flow_style:
                    sequence_style = YAML_FLOW_SEQUENCE_STYLE
                if yaml_sequence_start_event_initialize(&event, anchor, tag,
                        implicit, sequence_style) == 0:
                    raise MemoryError
                if yaml_emitter_emit(&self.emitter, &event) == 0:
                    error = self._emitter_error()
                    raise error
                item_index = 0
                for item in node.value:
                    self._serialize_node(item, node, item_index)
                    item_index = item_index+1
                yaml_sequence_end_event_initialize(&event)
                if yaml_emitter_emit(&self.emitter, &event) == 0:
                    error = self._emitter_error()
                    raise error
            elif node_class is MappingNode:
                implicit = 0
                tag_object = node.tag
                if self.resolve(MappingNode, node.value, True) == tag_object:
                    implicit = 1
                tag = NULL
                if tag_object is not None:
                    if PyUnicode_CheckExact(tag_object):
                        tag_object = PyUnicode_AsUTF8String(tag_object)
                    if not PyString_CheckExact(tag_object):
                        raise TypeError("tag must be a string")
                    tag = PyString_AS_STRING(tag_object)
                mapping_style = YAML_BLOCK_MAPPING_STYLE
                if node.flow_style:
                    mapping_style = YAML_FLOW_MAPPING_STYLE
                if yaml_mapping_start_event_initialize(&event, anchor, tag,
                        implicit, mapping_style) == 0:
                    raise MemoryError
                if yaml_emitter_emit(&self.emitter, &event) == 0:
                    error = self._emitter_error()
                    raise error
                for item_key, item_value in node.value:
                    self._serialize_node(item_key, node, None)
                    self._serialize_node(item_value, node, item_key)
                yaml_mapping_end_event_initialize(&event)
                if yaml_emitter_emit(&self.emitter, &event) == 0:
                    error = self._emitter_error()
                    raise error
            self.ascend_resolver()
        return 1

cdef int output_handler(void *data, char *buffer, int size) except 0:
    cdef CEmitter emitter
    emitter = <CEmitter>data
    value = PyString_FromStringAndSize(buffer, size)
    emitter.stream.write(value)
    return 1
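
# A minimal usage sketch, assuming this extension is built and the pure-Python
# package wires it up through yaml/cyaml.py (which combines CParser and
# CEmitter with the Resolver, Constructor and Representer mix-ins to produce
# CLoader and CDumper):
#
#     import yaml
#     try:
#         from yaml import CLoader as Loader, CDumper as Dumper
#     except ImportError:
#         from yaml import Loader, Dumper
#
#     data = yaml.load("a: [1, 2, 3]", Loader=Loader)
#     text = yaml.dump(data, Dumper=Dumper)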