All tests passed! Scanner and Parser seem to be correct.
[pyyaml/python3.git] / tests / test_tokens.py
import test_appliance

from yaml.scanner import *

class TestTokens(test_appliance.TestAppliance):

    # Tokens mnemonic:
    # directive: %
    # document_start: ---
    # document_end: ...
    # alias: *
    # anchor: &
    # tag: !
    # scalar: _
    # block_sequence_start: [[
    # block_mapping_start: {{
    # block_end: ]}
    # flow_sequence_start: [
    # flow_sequence_end: ]
    # flow_mapping_start: {
    # flow_mapping_end: }
    # entry: ,
    # key: ?
    # value: :

    replaces = {
        YAMLDirectiveToken: '%',
        TagDirectiveToken: '%',
        ReservedDirectiveToken: '%',
        DocumentStartToken: '---',
        DocumentEndToken: '...',
        AliasToken: '*',
        AnchorToken: '&',
        TagToken: '!',
        ScalarToken: '_',
        BlockSequenceStartToken: '[[',
        BlockMappingStartToken: '{{',
        BlockEndToken: ']}',
        FlowSequenceStartToken: '[',
        FlowSequenceEndToken: ']',
        FlowMappingStartToken: '{',
        FlowMappingEndToken: '}',
        EntryToken: ',',
        KeyToken: '?',
        ValueToken: ':',
    }

    def _testTokens(self, test_name, data_filename, tokens_filename):
        tokens1 = None
        tokens2 = file(tokens_filename, 'rb').read().split()
        try:
            scanner = Scanner(data_filename, file(data_filename, 'rb').read())
            tokens1 = []
            while not isinstance(scanner.peek_token(), EndToken):
                tokens1.append(scanner.get_token())
            tokens1 = [self.replaces[t.__class__] for t in tokens1]
            self.failUnlessEqual(tokens1, tokens2)
        except:
            print
            print "DATA:"
            print file(data_filename, 'rb').read()
            print "TOKENS1:", tokens1
            print "TOKENS2:", tokens2
            raise

TestTokens.add_tests('testTokens', '.data', '.tokens')
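
# Illustrative sketch only (not part of the original file): collapse the token
# stream of an inline document into the same mnemonic string that _testTokens
# compares against the '.tokens' fixture.  The sample document and the expected
# output in the comments below are assumptions, not recorded fixtures.
def _demo_token_mnemonics(data):
    scanner = Scanner('<demo>', data)
    tokens = []
    while not isinstance(scanner.peek_token(), EndToken):
        tokens.append(scanner.get_token())
    return ' '.join([TestTokens.replaces[t.__class__] for t in tokens])

# For example, _demo_token_mnemonics("- foo\n- bar\n") is expected to yield a
# string along the lines of '[[ , _ , _ ]}', matching the whitespace-separated
# contents of a corresponding '.tokens' file.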

class TestScanner(test_appliance.TestAppliance):

    def _testScanner(self, test_name, data_filename, canonical_filename):
        for filename in [canonical_filename, data_filename]:
            tokens = None
            try:
                scanner = Scanner(filename, file(filename, 'rb').read())
                tokens = []
                while not isinstance(scanner.peek_token(), EndToken):
                    tokens.append(scanner.get_token().__class__.__name__)
            except:
                print
                print "DATA:"
                print file(data_filename, 'rb').read()
                print "TOKENS:", tokens
                raise

TestScanner.add_tests('testScanner', '.data', '.canonical')
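
# Hypothetical sketch (for orientation only) of the fixture discovery that
# test_appliance.add_tests is assumed to perform: group files in the data
# directory by base name, keep the groups that provide every requested
# extension, and generate one test method per group.  The real logic lives in
# tests/test_appliance.py; the function and directory names here are made up.
def _collect_fixture_groups(directory, extensions):
    import os
    groups = {}
    for name in os.listdir(directory):
        base, ext = os.path.splitext(name)
        if ext in extensions:
            groups.setdefault(base, {})[ext] = os.path.join(directory, name)
    # Keep only the bases that provide every requested extension.
    return [(base, [files[ext] for ext in extensions])
            for base, files in sorted(groups.items())
            if len(files) == len(extensions)]

# _collect_fixture_groups('data', ['.data', '.tokens']) would yield
# (test_name, [data_filename, tokens_filename]) tuples shaped like the
# arguments of TestTokens._testTokens above.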