# Secret Labs' Regular Expression Engine
# convert template to internal format
#
# Copyright (c) 1997-2001 by Secret Labs AB.  All rights reserved.
#
# See the sre.py file for information on usage and redistribution.

"""Internal support module for sre"""

import _sre, sys
import sre_parse
from sre_constants import *

assert _sre.MAGIC == MAGIC, "SRE module mismatch"

if _sre.CODESIZE == 2:
    MAXCODE = 65535
else:
    MAXCODE = 0xFFFFFFFFL

def _identityfunction(x):
    return x

_LITERAL_CODES = set([LITERAL, NOT_LITERAL])
_REPEATING_CODES = set([REPEAT, MIN_REPEAT, MAX_REPEAT])
_SUCCESS_CODES = set([SUCCESS, FAILURE])
_ASSERT_CODES = set([ASSERT, ASSERT_NOT])

def _compile(code, pattern, flags):
    # internal: compile a (sub)pattern
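    # 'pattern' is a sequence of (op, av) pairs produced by sre_parse;
    # each pair is translated into numeric opcodes appended to 'code'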
    emit = code.append
    _len = len
    LITERAL_CODES = _LITERAL_CODES
    REPEATING_CODES = _REPEATING_CODES
    SUCCESS_CODES = _SUCCESS_CODES
    ASSERT_CODES = _ASSERT_CODES
    for op, av in pattern:
        if op in LITERAL_CODES:
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
                emit(_sre.getlower(av, flags))
            else:
                emit(OPCODES[op])
                emit(av)
        elif op is IN:
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
                def fixup(literal, flags=flags):
                    return _sre.getlower(literal, flags)
            else:
                emit(OPCODES[op])
                fixup = _identityfunction
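            # reserve one code word for the length of the charset block;
            # it is backpatched once the charset subprogram has been emitted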
            skip = _len(code); emit(0)
            _compile_charset(av, flags, code, fixup)
            code[skip] = _len(code) - skip
        elif op is ANY:
            if flags & SRE_FLAG_DOTALL:
                emit(OPCODES[ANY_ALL])
            else:
                emit(OPCODES[op])
        elif op in REPEATING_CODES:
            if flags & SRE_FLAG_TEMPLATE:
                raise error, "internal: unsupported template operator"
                emit(OPCODES[REPEAT])
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                emit(OPCODES[SUCCESS])
                code[skip] = _len(code) - skip
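            # a repeat whose body is a single character wide can use the
            # faster REPEAT_ONE / MIN_REPEAT_ONE opcodes instead of the
            # generic REPEAT ... MAX_UNTIL/MIN_UNTIL machinery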
            elif _simple(av) and op is not REPEAT:
                if op is MAX_REPEAT:
                    emit(OPCODES[REPEAT_ONE])
                else:
                    emit(OPCODES[MIN_REPEAT_ONE])
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                emit(OPCODES[SUCCESS])
                code[skip] = _len(code) - skip
            else:
                emit(OPCODES[REPEAT])
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                code[skip] = _len(code) - skip
                if op is MAX_REPEAT:
                    emit(OPCODES[MAX_UNTIL])
                else:
                    emit(OPCODES[MIN_UNTIL])
        elif op is SUBPATTERN:
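            # MARK opcodes record the start and end positions of group av[0]
            # (mark slots 2*(n-1) and 2*(n-1)+1 for group number n)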
            if av[0]:
                emit(OPCODES[MARK])
                emit((av[0]-1)*2)
            # _compile_info(code, av[1], flags)
            _compile(code, av[1], flags)
            if av[0]:
                emit(OPCODES[MARK])
                emit((av[0]-1)*2+1)
        elif op in SUCCESS_CODES:
            emit(OPCODES[op])
        elif op in ASSERT_CODES:
            emit(OPCODES[op])
            skip = _len(code); emit(0)
            if av[0] >= 0:
                emit(0) # look ahead
            else:
                lo, hi = av[1].getwidth()
                if lo != hi:
                    raise error, "look-behind requires fixed-width pattern"
                emit(lo) # look behind
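                # the engine evaluates a look-behind by stepping back exactly
                # 'lo' characters before matching, hence the fixed-width check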
            _compile(code, av[1], flags)
            emit(OPCODES[SUCCESS])
            code[skip] = _len(code) - skip
        elif op is CALL:
            emit(OPCODES[op])
            skip = _len(code); emit(0)
            _compile(code, av, flags)
            emit(OPCODES[SUCCESS])
            code[skip] = _len(code) - skip
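        # anchors: the generic AT_* argument is mapped to its multiline,
        # locale or unicode variant before emitting, depending on the flags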
        elif op is AT:
            emit(OPCODES[op])
            if flags & SRE_FLAG_MULTILINE:
                av = AT_MULTILINE.get(av, av)
            if flags & SRE_FLAG_LOCALE:
                av = AT_LOCALE.get(av, av)
            elif flags & SRE_FLAG_UNICODE:
                av = AT_UNICODE.get(av, av)
            emit(ATCODES[av])
        elif op is BRANCH:
            emit(OPCODES[op])
            tail = []
            tailappend = tail.append
            for av in av[1]:
                skip = _len(code); emit(0)
                # _compile_info(code, av, flags)
                _compile(code, av, flags)
                emit(OPCODES[JUMP])
                tailappend(_len(code)); emit(0)
                code[skip] = _len(code) - skip
            emit(0) # end of branch
            for tail in tail:
                code[tail] = _len(code) - tail
        elif op is CATEGORY:
            emit(OPCODES[op])
            if flags & SRE_FLAG_LOCALE:
                emit(CHCODES[CH_LOCALE[av]])
            elif flags & SRE_FLAG_UNICODE:
                emit(CHCODES[CH_UNICODE[av]])
            else:
                emit(CHCODES[av])
        elif op is GROUPREF:
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
            else:
                emit(OPCODES[op])
            emit(av-1)
        elif op is GROUPREF_EXISTS:
            emit(OPCODES[op])
            emit(av[0]-1)
            skipyes = _len(code); emit(0)
            _compile(code, av[1], flags)
            if av[2]:
                emit(OPCODES[JUMP])
                skipno = _len(code); emit(0)
                code[skipyes] = _len(code) - skipyes + 1
                _compile(code, av[2], flags)
                code[skipno] = _len(code) - skipno
            else:
                code[skipyes] = _len(code) - skipyes + 1
        else:
            raise ValueError, ("unsupported operand type", op)

def _compile_charset(charset, flags, code, fixup=None):
    # compile charset subprogram
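    # emits the (possibly optimized) set items one after another and
    # terminates the subprogram with a FAILURE opcode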
    emit = code.append
    if fixup is None:
        fixup = _identityfunction
    for op, av in _optimize_charset(charset, fixup):
        emit(OPCODES[op])
        if op is NEGATE:
            pass
        elif op is LITERAL:
            emit(fixup(av))
        elif op is RANGE:
            emit(fixup(av[0]))
            emit(fixup(av[1]))
        elif op is CHARSET:
            code.extend(av)
        elif op is BIGCHARSET:
            code.extend(av)
        elif op is CATEGORY:
            if flags & SRE_FLAG_LOCALE:
                emit(CHCODES[CH_LOCALE[av]])
            elif flags & SRE_FLAG_UNICODE:
                emit(CHCODES[CH_UNICODE[av]])
            else:
                emit(CHCODES[av])
        else:
            raise error, "internal: unsupported set operator"
    emit(OPCODES[FAILURE])

def _optimize_charset(charset, fixup):
    # internal: optimize character set
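    # strategy: build a 256-entry bit map of the set; if the map collapses to
    # at most two runs it is re-emitted as LITERAL/RANGE items, otherwise it
    # becomes a CHARSET bitmap.  Characters above 255 raise IndexError, which
    # hands the set over to _optimize_unicode.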
    out = []
    outappend = out.append
    charmap = [0]*256
    try:
        for op, av in charset:
            if op is NEGATE:
                outappend((op, av))
            elif op is LITERAL:
                charmap[fixup(av)] = 1
            elif op is RANGE:
                for i in range(fixup(av[0]), fixup(av[1])+1):
                    charmap[i] = 1
            elif op is CATEGORY:
                # XXX: could append to charmap tail
                return charset # cannot compress
    except IndexError:
        # character set contains unicode characters
        return _optimize_unicode(charset, fixup)
    # compress character map
    i = p = n = 0
    runs = []
    runsappend = runs.append
    for c in charmap:
        if c:
            if n == 0:
                p = i
            n = n + 1
        elif n:
            runsappend((p, n))
            n = 0
        i = i + 1
    if n:
        runsappend((p, n))
    if len(runs) <= 2:
        # use literal/range
        for p, n in runs:
            if n == 1:
                outappend((LITERAL, p))
            else:
                outappend((RANGE, (p, p+n-1)))
        if len(out) < len(charset):
            return out
    else:
        # use bitmap
        data = _mk_bitmap(charmap)
        outappend((CHARSET, data))
        return out
    return charset

def _mk_bitmap(bits):
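    # pack a sequence of 0/1 flags into integers of at most CODESIZE bits,
    # least significant bit first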
    data = []
    dataappend = data.append
    if _sre.CODESIZE == 2:
        start = (1, 0)
    else:
        start = (1L, 0L)
    m, v = start
    for c in bits:
        if c:
            v = v + m
        m = m + m
        if m > MAXCODE:
            dataappend(v)
            m, v = start
    return data

# To represent a big charset, first a bitmap of all characters in the
# set is constructed. Then, this bitmap is sliced into chunks of 256
# characters, duplicate chunks are eliminated, and each chunk is
# given a number. In the compiled expression, the charset is
# represented by a 16-bit word sequence, consisting of one word for
# the number of different chunks, a sequence of 256 bytes (128 words)
# of chunk numbers indexed by their original chunk position, and a
# sequence of chunks (16 words each).
#
# Compression is normally good: in a typical charset, large ranges of
# Unicode will be either completely excluded (e.g. if only cyrillic
# letters are to be matched), or completely included (e.g. if large
# subranges of Kanji match). These ranges will be represented by
# chunks of all one-bits or all zero-bits.
#
# Matching can be also done efficiently: the more significant byte of
# the Unicode character is an index into the chunk number, and the
# less significant byte is a bit index in the chunk (just like the
# CHARSET matching).
#
# In UCS-4 mode, the BIGCHARSET opcode still supports only subsets
# of the basic multilingual plane; an efficient representation
# for all of UTF-16 has not yet been developed. This means,
# in particular, that negated charsets cannot be represented as
# big charsets.
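#
# As a rough size example (assuming the layout described above): a set that
# contains exactly the 256 characters U+0400-U+04FF produces only two
# distinct chunks, an all-zero one and an all-one one, so in 16-bit mode the
# compiled form needs 1 word (chunk count) + 128 words (the 256 mapping
# bytes) + 2*16 words of chunk data.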

def _optimize_unicode(charset, fixup):
    try:
        import array
    except ImportError:
        return charset
    charmap = [0]*65536
    negate = 0
    try:
        for op, av in charset:
            if op is NEGATE:
                negate = 1
            elif op is LITERAL:
                charmap[fixup(av)] = 1
            elif op is RANGE:
                for i in xrange(fixup(av[0]), fixup(av[1])+1):
                    charmap[i] = 1
            elif op is CATEGORY:
                # XXX: could expand category
                return charset # cannot compress
    except IndexError:
        # non-BMP characters
        return charset
    if negate:
        if sys.maxunicode != 65535:
            # XXX: negation does not work with big charsets
            return charset
        for i in xrange(65536):
            charmap[i] = not charmap[i]
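    # slice the 64K character map into 256-character chunks and drop
    # duplicates: comps maps each distinct chunk to the index of its first
    # occurrence, and mapping[] records that index for every chunk position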
    comps = {}
    mapping = [0]*256
    block = 0
    data = []
    for i in xrange(256):
        chunk = tuple(charmap[i*256:(i+1)*256])
        new = comps.setdefault(chunk, block)
        mapping[i] = new
        if new == block:
            block = block + 1
            data = data + _mk_bitmap(chunk)
    header = [block]
    if _sre.CODESIZE == 2:
        code = 'H'
    else:
        code = 'I'
    # Convert block indices to byte array of 256 bytes
    mapping = array.array('b', mapping).tostring()
    # Convert byte array to word array
    mapping = array.array(code, mapping)
    assert mapping.itemsize == _sre.CODESIZE
    header = header + mapping.tolist()
    data[0:0] = header
    return [(BIGCHARSET, data)]

def _simple(av):
    # check if av is a "simple" operator
    lo, hi = av[2].getwidth()
    if lo == 0 and hi == MAXREPEAT:
        raise error, "nothing to repeat"
    return lo == hi == 1 and av[2][0][0] != SUBPATTERN

def _compile_info(code, pattern, flags):
    # internal: compile an info block.  in the current version,
    # this contains min/max pattern width, and an optional literal
    # prefix or a character map
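    # the info block is consulted by the matcher before the main program: it
    # lets it reject strings shorter than the minimum width and scan quickly
    # for a known literal prefix or first-character set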
    lo, hi = pattern.getwidth()
    if lo == 0:
        return # not worth it
    # look for a literal prefix
    prefix = []
    prefixappend = prefix.append
    prefix_skip = 0
    charset = [] # not used
    charsetappend = charset.append
    if not (flags & SRE_FLAG_IGNORECASE):
        # look for literal prefix
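        # collect the run of LITERAL nodes (and single-literal groups) at the
        # start of the pattern; prefix_skip counts the leading literals that
        # are not wrapped in a group, i.e. those the matcher can skip over
        # once the prefix has been found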
        for op, av in pattern.data:
            if op is LITERAL:
                if len(prefix) == prefix_skip:
                    prefix_skip = prefix_skip + 1
                prefixappend(av)
            elif op is SUBPATTERN and len(av[1]) == 1:
                op, av = av[1][0]
                if op is LITERAL:
                    prefixappend(av)
                else:
                    break
            else:
                break
        # if no prefix, look for charset prefix
        if not prefix and pattern.data:
            op, av = pattern.data[0]
            if op is SUBPATTERN and av[1]:
                op, av = av[1][0]
                if op is LITERAL:
                    charsetappend((op, av))
                elif op is BRANCH:
                    c = []
                    cappend = c.append
                    for p in av[1]:
                        if not p:
                            break
                        op, av = p[0]
                        if op is LITERAL:
                            cappend((op, av))
                        else:
                            break
                    else:
                        charset = c
            elif op is BRANCH:
                c = []
                cappend = c.append
                for p in av[1]:
                    if not p:
                        break
                    op, av = p[0]
                    if op is LITERAL:
                        cappend((op, av))
                    else:
                        break
                else:
                    charset = c
            elif op is IN:
                charset = av
##     if prefix:
##         print "*** PREFIX", prefix, prefix_skip
##     if charset:
##         print "*** CHARSET", charset
    # add an info block
    emit = code.append
    emit(OPCODES[INFO])
    skip = len(code); emit(0)
    # literal flag
    mask = 0
    if prefix:
        mask = SRE_INFO_PREFIX
        if len(prefix) == prefix_skip == len(pattern.data):
            mask = mask + SRE_INFO_LITERAL
    elif charset:
        mask = mask + SRE_INFO_CHARSET
    emit(mask)
    # pattern length
    if lo < MAXCODE:
        emit(lo)
    else:
        emit(MAXCODE)
        prefix = prefix[:MAXCODE]
    if hi < MAXCODE:
        emit(hi)
    else:
        emit(0)
    # add literal prefix
    if prefix:
        emit(len(prefix)) # length
        emit(prefix_skip) # skip
        code.extend(prefix)
        # generate overlap table
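        # this is the classic prefix-overlap (KMP failure) table: table[i+1]
        # is the length of the longest proper prefix of prefix[:i+1] that is
        # also a suffix, letting the matcher resume without rescanning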
        table = [-1] + ([0]*len(prefix))
        for i in xrange(len(prefix)):
            table[i+1] = table[i]+1
            while table[i+1] > 0 and prefix[i] != prefix[table[i+1]-1]:
                table[i+1] = table[table[i+1]-1]+1
        code.extend(table[1:]) # don't store first entry
    elif charset:
        _compile_charset(charset, flags, code)
    code[skip] = len(code) - skip

try:
    unicode
except NameError:
    STRING_TYPES = (type(""),)
else:
    STRING_TYPES = (type(""), type(unicode("")))

def isstring(obj):
    for tp in STRING_TYPES:
        if isinstance(obj, tp):
            return 1
    return 0

def _code(p, flags):

    flags = p.pattern.flags | flags
    code = []

    # compile info block
    _compile_info(code, p, flags)

    # compile the pattern
    _compile(code, p.data, flags)

    code.append(OPCODES[SUCCESS])

    return code

def compile(p, flags=0):
    # internal: convert pattern list to internal format
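    # accepts either a pattern string (which is run through sre_parse.parse)
    # or an already parsed pattern object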

    if isstring(p):
        pattern = p
        p = sre_parse.parse(p, flags)
    else:
        pattern = None

    code = _code(p, flags)

    # XXX: <fl> get rid of this limitation!
    if p.pattern.groups > 100:
        raise AssertionError(
            "sorry, but this version only supports 100 named groups"
            )

    # map in either direction
    groupindex = p.pattern.groupdict
    indexgroup = [None] * p.pattern.groups
    for k, i in groupindex.items():
        indexgroup[i] = k

    return _sre.compile(
        pattern, flags | p.pattern.flags, code,
        p.pattern.groups-1,
        groupindex, indexgroup
        )