Some renaming.
[pyyaml/python3.git] / tests / test_tokens.py
blob1343e57ca2afa4f185046d9a42d560ba3ad3ea41
2 import test_appliance
4 from yaml.reader import *
5 from yaml.tokens import *
6 from yaml.scanner import *
class TestTokens(test_appliance.TestAppliance):
    """Compare the scanner's token stream against an expected `.tokens` file.

    Each scanned token class is translated to a short mnemonic and the
    resulting list is compared with the whitespace-separated mnemonics
    stored in the companion `.tokens` file.

    Tokens mnemonic:
        directive:            %
        document_start:       ---
        document_end:         ...
        alias:                *
        anchor:               &
        tag:                  !
        scalar:               _
        block_sequence_start: [[
        block_mapping_start:  {{
        block_end:            ]}
        flow_sequence_start:  [
        flow_sequence_end:    ]
        flow_mapping_start:   {
        flow_mapping_end:     }
        entry:                ,
        key:                  ?
        value:                :
    """

    # Map each token class to its mnemonic string.
    replaces = {
        YAMLDirectiveToken: '%',
        TagDirectiveToken: '%',
        ReservedDirectiveToken: '%',
        DocumentStartToken: '---',
        DocumentEndToken: '...',
        AliasToken: '*',
        AnchorToken: '&',
        TagToken: '!',
        ScalarToken: '_',
        BlockSequenceStartToken: '[[',
        BlockMappingStartToken: '{{',
        BlockEndToken: ']}',
        FlowSequenceStartToken: '[',
        FlowSequenceEndToken: ']',
        FlowMappingStartToken: '{',
        FlowMappingEndToken: '}',
        EntryToken: ',',
        KeyToken: '?',
        ValueToken: ':',
    }  # NOTE: closing brace restored — it was missing in the scraped source.

    def _testTokens(self, test_name, data_filename, tokens_filename):
        """Scan `data_filename` and compare mnemonics with `tokens_filename`.

        On any failure, dump the raw data and both token lists for
        diagnosis, then re-raise the original exception.
        """
        tokens1 = None
        # The expected mnemonics are plain text; read in text mode so the
        # split() result is a list of str, matching the mnemonic values.
        with open(tokens_filename) as stream:
            tokens2 = stream.read().split()
        try:
            # Keep the data file open for the whole scan: Reader may
            # consume the stream lazily while tokens are peeked/pulled.
            with open(data_filename, 'rb') as stream:
                scanner = Scanner(Reader(stream))
                tokens1 = []
                while not isinstance(scanner.peek_token(), EndToken):
                    tokens1.append(scanner.get_token())
            tokens1 = [self.replaces[t.__class__] for t in tokens1]
            self.assertEqual(tokens1, tokens2)
        except Exception:
            print()
            print("DATA:")
            with open(data_filename, 'rb') as stream:
                print(stream.read())
            print("TOKENS1:", tokens1)
            print("TOKENS2:", tokens2)
            raise
69 TestTokens.add_tests('testTokens', '.data', '.tokens')
class TestScanner(test_appliance.TestAppliance):
    """Smoke-test the scanner: every fixture document must tokenize cleanly."""

    def _testScanner(self, test_name, data_filename, canonical_filename):
        """Scan both the canonical and the data form of a test document.

        Only verifies that scanning runs to the end token without
        raising; on failure, dumps the data and the token names seen so
        far, then re-raises.
        """
        for filename in [canonical_filename, data_filename]:
            tokens = None
            try:
                # Keep the file open for the whole scan: Reader may
                # consume the stream lazily as tokens are pulled.
                with open(filename, 'rb') as stream:
                    scanner = Scanner(Reader(stream))
                    tokens = []
                    while not isinstance(scanner.peek_token(), EndToken):
                        tokens.append(scanner.get_token().__class__.__name__)
            except Exception:
                print()
                print("DATA:")
                with open(data_filename, 'rb') as stream:
                    print(stream.read())
                print("TOKENS:", tokens)
                raise
88 TestScanner.add_tests('testScanner', '.data', '.canonical')