Scanner is complete.
[pyyaml/python3.git] / tests / test_tokens.py
blobf5daaf22c887648a056cafe84f0dc91c6a9df6b6
2 import test_appliance
4 from yaml.reader import *
5 from yaml.tokens import *
6 from yaml.scanner import *
class TestTokens(test_appliance.TestAppliance):
    """Check that scanning a .data file yields the expected token stream.

    Each token class is reduced to a short mnemonic and the resulting list
    is compared against the whitespace-separated contents of the matching
    .tokens fixture file.

    Tokens mnemonic:
        directive:            %
        document_start:       ---
        document_end:         ...
        alias:                *
        anchor:               &
        tag:                  !
        scalar:               _
        block_sequence_start: [[
        block_mapping_start:  {{
        block_end:            ]}
        flow_sequence_start:  [
        flow_sequence_end:    ]
        flow_mapping_start:   {
        flow_mapping_end:     }
        entry:                ,
        key:                  ?
        value:                :
    """

    # Map each token class to its mnemonic string.
    replaces = {
        DirectiveToken: '%',
        DocumentStartToken: '---',
        DocumentEndToken: '...',
        AliasToken: '*',
        AnchorToken: '&',
        TagToken: '!',
        ScalarToken: '_',
        BlockSequenceStartToken: '[[',
        BlockMappingStartToken: '{{',
        BlockEndToken: ']}',
        FlowSequenceStartToken: '[',
        FlowSequenceEndToken: ']',
        FlowMappingStartToken: '{',
        FlowMappingEndToken: '}',
        EntryToken: ',',
        KeyToken: '?',
        ValueToken: ':',
    }

    def _testTokens(self, test_name, data_filename, tokens_filename):
        """Scan data_filename and compare token mnemonics with tokens_filename.

        On any failure, dump the input data and both token lists before
        re-raising so the failing fixture is easy to identify.
        """
        tokens1 = None
        # Text mode: the mnemonics in self.replaces are str, so the expected
        # tokens must be str as well (bytes would never compare equal).
        with open(tokens_filename) as f:
            tokens2 = f.read().split()
        try:
            with open(data_filename, 'rb') as f:
                scanner = Scanner(Reader(f))
                tokens1 = []
                while not isinstance(scanner.peek_token(), StreamEndToken):
                    tokens1.append(scanner.get_token())
            tokens1 = [self.replaces[t.__class__] for t in tokens1]
            self.assertEqual(tokens1, tokens2)
        except Exception:
            print()
            print("DATA:")
            with open(data_filename, 'rb') as f:
                print(f.read())
            print("TOKENS1:", tokens1)
            print("TOKENS2:", tokens2)
            raise
# Generate one test method per fixture pair: each .data file is scanned and
# compared against the sibling .tokens file.
TestTokens.add_tests('testTokens', '.data', '.tokens')
class TestScanner(test_appliance.TestAppliance):
    """Smoke test: the scanner must tokenize both fixture variants cleanly.

    No expected output is checked here; the test only asserts that scanning
    runs to StreamEndToken without raising, for both the canonical and the
    plain data form of each fixture.
    """

    def _testScanner(self, test_name, data_filename, canonical_filename):
        """Scan both fixture files to completion, dumping state on failure."""
        for filename in [canonical_filename, data_filename]:
            tokens = None
            try:
                with open(filename, 'rb') as f:
                    scanner = Scanner(Reader(f))
                    tokens = []
                    while not isinstance(scanner.peek_token(), StreamEndToken):
                        tokens.append(scanner.get_token().__class__.__name__)
            except Exception:
                print()
                print("DATA:")
                # Bug fix: report the file that actually failed (the original
                # always printed data_filename, even when the canonical file
                # was the one being scanned).
                with open(filename, 'rb') as f:
                    print(f.read())
                print("TOKENS:", tokens)
                raise
# Generate one test method per fixture pair: each .data file and its
# .canonical counterpart are both scanned for errors.
TestScanner.add_tests('testScanner', '.data', '.canonical')