4 from yaml
.scanner
import *
6 class TestTokens(test_appliance
.TestAppliance
):
16 # block_sequence_start: [[
17 # block_mapping_start: {{
19 # flow_sequence_start: [
20 # flow_sequence_end: ]
21 # flow_mapping_start: {
28 YAMLDirectiveToken
: '%',
29 TagDirectiveToken
: '%',
30 ReservedDirectiveToken
: '%',
31 DocumentStartToken
: '---',
32 DocumentEndToken
: '...',
37 BlockSequenceStartToken
: '[[',
38 BlockMappingStartToken
: '{{',
40 FlowSequenceStartToken
: '[',
41 FlowSequenceEndToken
: ']',
42 FlowMappingStartToken
: '{',
43 FlowMappingEndToken
: '}',
49 def _testTokens(self
, test_name
, data_filename
, tokens_filename
):
51 tokens2
= file(tokens_filename
, 'rb').read().split()
53 scanner
= Scanner(data_filename
, file(data_filename
, 'rb').read())
55 while not isinstance(scanner
.peek_token(), EndToken
):
56 tokens1
.append(scanner
.get_token())
57 tokens1
= [self
.replaces
[t
.__class
__] for t
in tokens1
]
58 self
.failUnlessEqual(tokens1
, tokens2
)
62 print file(data_filename
, 'rb').read()
63 print "TOKENS1:", tokens1
64 print "TOKENS2:", tokens2
# Generate one test method per matching .data/.tokens fixture pair.
TestTokens.add_tests('testTokens', '.data', '.tokens')
69 class TestScanner(test_appliance
.TestAppliance
):
71 def _testScanner(self
, test_name
, data_filename
, canonical_filename
):
72 for filename
in [canonical_filename
, data_filename
]:
75 scanner
= Scanner(filename
, file(filename
, 'rb').read())
77 while not isinstance(scanner
.peek_token(), EndToken
):
78 tokens
.append(scanner
.get_token().__class
__.__name
__)
82 print file(data_filename
, 'rb').read()
83 print "TOKENS:", tokens
# Generate one test method per matching .data/.canonical fixture pair.
TestScanner.add_tests('testScanner', '.data', '.canonical')