# Extraction provenance (was raw git-web residue, now commented so the file parses):
#   Samba/gebeck_regimport.git / lib / dnspython / tests / tokenizer.py
#   blob 1d561ae1b5dc06e74787d3d1610efca676118026
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest

import dns.exception
import dns.tokenizer

# Convenience alias: the Token class used in expected-value comparisons below.
Token = dns.tokenizer.Token
class TokenizerTestCase(unittest.TestCase):
    """Unit tests for dns.tokenizer.

    Covers quoted-string parsing (including decimal escapes and error
    cases), whitespace and comment handling, multiline (parenthesized)
    input, the one-token unget buffer, EOL/EOF behavior, and escaped
    delimiters inside identifiers.
    """

    def testQuotedString1(self):
        tok = dns.tokenizer.Tokenizer(r'"foo"')
        token = tok.get()
        self.assertEqual(token, Token(dns.tokenizer.QUOTED_STRING, 'foo'))

    def testQuotedString2(self):
        tok = dns.tokenizer.Tokenizer(r'""')
        token = tok.get()
        self.assertEqual(token, Token(dns.tokenizer.QUOTED_STRING, ''))

    def testQuotedString3(self):
        # Escaped quotes inside a quoted string are kept as literal quotes.
        tok = dns.tokenizer.Tokenizer(r'"\"foo\""')
        token = tok.get()
        self.assertEqual(token, Token(dns.tokenizer.QUOTED_STRING, '"foo"'))

    def testQuotedString4(self):
        # \010 is a master-file decimal escape: chr(10), i.e. a newline.
        tok = dns.tokenizer.Tokenizer(r'"foo\010bar"')
        token = tok.get()
        self.assertEqual(token,
                         Token(dns.tokenizer.QUOTED_STRING, 'foo\x0abar'))

    def testQuotedString5(self):
        # An unterminated quoted string must raise UnexpectedEnd.
        def bad():
            tok = dns.tokenizer.Tokenizer(r'"foo')
            tok.get()
        self.assertRaises(dns.exception.UnexpectedEnd, bad)

    def testQuotedString6(self):
        # A truncated decimal escape must raise SyntaxError.
        def bad():
            tok = dns.tokenizer.Tokenizer(r'"foo\01')
            tok.get()
        self.assertRaises(dns.exception.SyntaxError, bad)

    def testQuotedString7(self):
        # A literal newline inside a quoted string is not allowed.
        def bad():
            tok = dns.tokenizer.Tokenizer('"foo\nbar"')
            tok.get()
        self.assertRaises(dns.exception.SyntaxError, bad)

    def testEmpty1(self):
        tok = dns.tokenizer.Tokenizer('')
        token = tok.get()
        self.assertTrue(token.is_eof())

    def testEmpty2(self):
        # EOF is sticky: repeated get() calls keep returning EOF.
        tok = dns.tokenizer.Tokenizer('')
        token1 = tok.get()
        token2 = tok.get()
        self.assertTrue(token1.is_eof())
        self.assertTrue(token2.is_eof())

    def testEOL(self):
        tok = dns.tokenizer.Tokenizer('\n')
        token1 = tok.get()
        token2 = tok.get()
        self.assertTrue(token1.is_eol())
        self.assertTrue(token2.is_eof())

    def testWS1(self):
        # Leading whitespace is skipped by default.
        tok = dns.tokenizer.Tokenizer(' \n')
        token1 = tok.get()
        self.assertTrue(token1.is_eol())

    def testWS2(self):
        # want_leading=True surfaces leading whitespace as a token.
        tok = dns.tokenizer.Tokenizer(' \n')
        token1 = tok.get(want_leading=True)
        self.assertTrue(token1.is_whitespace())

    def testComment1(self):
        # Comments are skipped by default.
        tok = dns.tokenizer.Tokenizer(' ;foo\n')
        token1 = tok.get()
        self.assertTrue(token1.is_eol())

    def testComment2(self):
        # want_comment=True yields the comment text (without the ';').
        tok = dns.tokenizer.Tokenizer(' ;foo\n')
        token1 = tok.get(want_comment=True)
        token2 = tok.get()
        self.assertEqual(token1, Token(dns.tokenizer.COMMENT, 'foo'))
        self.assertTrue(token2.is_eol())

    def testComment3(self):
        tok = dns.tokenizer.Tokenizer(' ;foo bar\n')
        token1 = tok.get(want_comment=True)
        token2 = tok.get()
        self.assertEqual(token1, Token(dns.tokenizer.COMMENT, 'foo bar'))
        self.assertTrue(token2.is_eol())

    def testMultiline1(self):
        # Inside parentheses, newlines are not reported as EOL tokens.
        tok = dns.tokenizer.Tokenizer('( foo\n\n bar\n)')
        tokens = list(iter(tok))
        self.assertEqual(tokens, [Token(dns.tokenizer.IDENTIFIER, 'foo'),
                                  Token(dns.tokenizer.IDENTIFIER, 'bar')])

    def testMultiline2(self):
        # A newline after the closing ')' is reported as an EOL token.
        tok = dns.tokenizer.Tokenizer('( foo\n\n bar\n)\n')
        tokens = list(iter(tok))
        self.assertEqual(tokens, [Token(dns.tokenizer.IDENTIFIER, 'foo'),
                                  Token(dns.tokenizer.IDENTIFIER, 'bar'),
                                  Token(dns.tokenizer.EOL, '\n')])

    def testMultiline3(self):
        # An unbalanced ')' must raise SyntaxError.
        def bad():
            tok = dns.tokenizer.Tokenizer('foo)')
            list(iter(tok))
        self.assertRaises(dns.exception.SyntaxError, bad)

    def testMultiline4(self):
        # An unclosed '(' must raise SyntaxError.
        def bad():
            tok = dns.tokenizer.Tokenizer('((foo)')
            list(iter(tok))
        self.assertRaises(dns.exception.SyntaxError, bad)

    def testUnget1(self):
        # An ungotten token is returned again by the next get().
        tok = dns.tokenizer.Tokenizer('foo')
        t1 = tok.get()
        tok.unget(t1)
        t2 = tok.get()
        self.assertEqual(t1, t2)
        self.assertEqual(t1.ttype, dns.tokenizer.IDENTIFIER)
        self.assertEqual(t1.value, 'foo')

    def testUnget2(self):
        # The unget buffer holds exactly one token.
        def bad():
            tok = dns.tokenizer.Tokenizer('foo')
            t1 = tok.get()
            tok.unget(t1)
            tok.unget(t1)
        self.assertRaises(dns.tokenizer.UngetBufferFull, bad)

    def testGetEOL1(self):
        tok = dns.tokenizer.Tokenizer('\n')
        t = tok.get_eol()
        self.assertEqual(t, '\n')

    def testGetEOL2(self):
        # get_eol() accepts EOF as well as a literal newline.
        tok = dns.tokenizer.Tokenizer('')
        t = tok.get_eol()
        self.assertEqual(t, '')

    def testEscapedDelimiter1(self):
        # An escaped space does not split the identifier; the escape
        # sequence is preserved until unescape() is called.
        tok = dns.tokenizer.Tokenizer(r'ch\ ld')
        t = tok.get()
        self.assertEqual(t.ttype, dns.tokenizer.IDENTIFIER)
        self.assertEqual(t.value, r'ch\ ld')

    def testEscapedDelimiter2(self):
        tok = dns.tokenizer.Tokenizer(r'ch\032ld')
        t = tok.get()
        self.assertEqual(t.ttype, dns.tokenizer.IDENTIFIER)
        self.assertEqual(t.value, r'ch\032ld')

    def testEscapedDelimiter3(self):
        tok = dns.tokenizer.Tokenizer(r'ch\ild')
        t = tok.get()
        self.assertEqual(t.ttype, dns.tokenizer.IDENTIFIER)
        self.assertEqual(t.value, r'ch\ild')

    def testEscapedDelimiter1u(self):
        # unescape() resolves a backslash-space escape to a literal space.
        tok = dns.tokenizer.Tokenizer(r'ch\ ld')
        t = tok.get().unescape()
        self.assertEqual(t.ttype, dns.tokenizer.IDENTIFIER)
        self.assertEqual(t.value, 'ch ld')

    def testEscapedDelimiter2u(self):
        # \032 is decimal 32, i.e. a space, after unescaping.
        tok = dns.tokenizer.Tokenizer(r'ch\032ld')
        t = tok.get().unescape()
        self.assertEqual(t.ttype, dns.tokenizer.IDENTIFIER)
        self.assertEqual(t.value, 'ch ld')

    def testEscapedDelimiter3u(self):
        # Escaping a non-delimiter is dropped by unescape().
        tok = dns.tokenizer.Tokenizer(r'ch\ild')
        t = tok.get().unescape()
        self.assertEqual(t.ttype, dns.tokenizer.IDENTIFIER)
        self.assertEqual(t.value, 'child')
if __name__ == '__main__':
    # Run the full test suite when this file is executed directly.
    unittest.main()