/* Extended regular expression matching and search library.
   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Isamu Hasegawa <isamu@yamato.ibm.com>.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <https://www.gnu.org/licenses/>.  */
static reg_errcode_t match_ctx_init (re_match_context_t *cache, int eflags,
                                     Idx n);
static void match_ctx_clean (re_match_context_t *mctx);
static void match_ctx_free (re_match_context_t *cache);
static reg_errcode_t match_ctx_add_entry (re_match_context_t *cache, Idx node,
                                          Idx str_idx, Idx from, Idx to);
static Idx search_cur_bkref_entry (const re_match_context_t *mctx, Idx str_idx);
static reg_errcode_t match_ctx_add_subtop (re_match_context_t *mctx, Idx node,
                                           Idx str_idx);
static re_sub_match_last_t * match_ctx_add_sublast (re_sub_match_top_t *subtop,
                                                    Idx node, Idx str_idx);
static void sift_ctx_init (re_sift_context_t *sctx, re_dfastate_t **sifted_sts,
                           re_dfastate_t **limited_sts, Idx last_node,
                           Idx last_str_idx);
static reg_errcode_t re_search_internal (const regex_t *preg,
                                         const char *string, Idx length,
                                         Idx start, Idx last_start, Idx stop,
                                         size_t nmatch, regmatch_t pmatch[],
                                         int eflags);
static regoff_t re_search_2_stub (struct re_pattern_buffer *bufp,
                                  const char *string1, Idx length1,
                                  const char *string2, Idx length2,
                                  Idx start, regoff_t range,
                                  struct re_registers *regs,
                                  Idx stop, bool ret_len);
static regoff_t re_search_stub (struct re_pattern_buffer *bufp,
                                const char *string, Idx length, Idx start,
                                regoff_t range, Idx stop,
                                struct re_registers *regs,
                                bool ret_len);
static unsigned re_copy_regs (struct re_registers *regs, regmatch_t *pmatch,
                              Idx nregs, int regs_allocated);
static reg_errcode_t prune_impossible_nodes (re_match_context_t *mctx);
static Idx check_matching (re_match_context_t *mctx, bool fl_longest_match,
                           Idx *p_match_first);
static Idx check_halt_state_context (const re_match_context_t *mctx,
                                     const re_dfastate_t *state, Idx idx);
static void update_regs (const re_dfa_t *dfa, regmatch_t *pmatch,
                         regmatch_t *prev_idx_match, Idx cur_node,
                         Idx cur_idx, Idx nmatch);
static reg_errcode_t push_fail_stack (struct re_fail_stack_t *fs,
                                      Idx str_idx, Idx dest_node, Idx nregs,
                                      regmatch_t *regs, regmatch_t *prevregs,
                                      re_node_set *eps_via_nodes);
static reg_errcode_t set_regs (const regex_t *preg,
                               const re_match_context_t *mctx,
                               size_t nmatch, regmatch_t *pmatch,
                               bool fl_backtrack);
static reg_errcode_t free_fail_stack_return (struct re_fail_stack_t *fs);

static int sift_states_iter_mb (const re_match_context_t *mctx,
                                re_sift_context_t *sctx,
                                Idx node_idx, Idx str_idx, Idx max_str_idx);
#endif /* RE_ENABLE_I18N */
static reg_errcode_t sift_states_backward (const re_match_context_t *mctx,
                                           re_sift_context_t *sctx);
static reg_errcode_t build_sifted_states (const re_match_context_t *mctx,
                                          re_sift_context_t *sctx, Idx str_idx,
                                          re_node_set *cur_dest);
static reg_errcode_t update_cur_sifted_state (const re_match_context_t *mctx,
                                              re_sift_context_t *sctx,
                                              Idx str_idx,
                                              re_node_set *dest_nodes);
static reg_errcode_t add_epsilon_src_nodes (const re_dfa_t *dfa,
                                            re_node_set *dest_nodes,
                                            const re_node_set *candidates);
static bool check_dst_limits (const re_match_context_t *mctx,
                              const re_node_set *limits,
                              Idx dst_node, Idx dst_idx, Idx src_node,
                              Idx src_idx);
static int check_dst_limits_calc_pos_1 (const re_match_context_t *mctx,
                                        int boundaries, Idx subexp_idx,
                                        Idx from_node, Idx bkref_idx);
static int check_dst_limits_calc_pos (const re_match_context_t *mctx,
                                      Idx limit, Idx subexp_idx,
                                      Idx node, Idx str_idx,
                                      Idx bkref_idx);
static reg_errcode_t check_subexp_limits (const re_dfa_t *dfa,
                                          re_node_set *dest_nodes,
                                          const re_node_set *candidates,
                                          re_node_set *limits,
                                          struct re_backref_cache_entry *bkref_ents,
                                          Idx str_idx);
static reg_errcode_t sift_states_bkref (const re_match_context_t *mctx,
                                        re_sift_context_t *sctx,
                                        Idx str_idx, const re_node_set *candidates);
static reg_errcode_t merge_state_array (const re_dfa_t *dfa,
                                        re_dfastate_t **dst,
                                        re_dfastate_t **src, Idx num);
static re_dfastate_t *find_recover_state (reg_errcode_t *err,
                                          re_match_context_t *mctx);
static re_dfastate_t *transit_state (reg_errcode_t *err,
                                     re_match_context_t *mctx,
                                     re_dfastate_t *state);
static re_dfastate_t *merge_state_with_log (reg_errcode_t *err,
                                            re_match_context_t *mctx,
                                            re_dfastate_t *next_state);
static reg_errcode_t check_subexp_matching_top (re_match_context_t *mctx,
                                                re_node_set *cur_nodes,
                                                Idx str_idx);
static re_dfastate_t *transit_state_sb (reg_errcode_t *err,
                                        re_match_context_t *mctx,
                                        re_dfastate_t *pstate);
#ifdef RE_ENABLE_I18N
static reg_errcode_t transit_state_mb (re_match_context_t *mctx,
                                       re_dfastate_t *pstate);
#endif /* RE_ENABLE_I18N */
static reg_errcode_t transit_state_bkref (re_match_context_t *mctx,
                                          const re_node_set *nodes);
static reg_errcode_t get_subexp (re_match_context_t *mctx,
                                 Idx bkref_node, Idx bkref_str_idx);
static reg_errcode_t get_subexp_sub (re_match_context_t *mctx,
                                     const re_sub_match_top_t *sub_top,
                                     re_sub_match_last_t *sub_last,
                                     Idx bkref_node, Idx bkref_str);
static Idx find_subexp_node (const re_dfa_t *dfa, const re_node_set *nodes,
                             Idx subexp_idx, int type);
static reg_errcode_t check_arrival (re_match_context_t *mctx,
                                    state_array_t *path, Idx top_node,
                                    Idx top_str, Idx last_node, Idx last_str,
                                    int type);
static reg_errcode_t check_arrival_add_next_nodes (re_match_context_t *mctx,
                                                   Idx str_idx,
                                                   re_node_set *cur_nodes,
                                                   re_node_set *next_nodes);
static reg_errcode_t check_arrival_expand_ecl (const re_dfa_t *dfa,
                                               re_node_set *cur_nodes,
                                               Idx ex_subexp, int type);
static reg_errcode_t check_arrival_expand_ecl_sub (const re_dfa_t *dfa,
                                                   re_node_set *dst_nodes,
                                                   Idx target, Idx ex_subexp,
                                                   int type);
static reg_errcode_t expand_bkref_cache (re_match_context_t *mctx,
                                         re_node_set *cur_nodes, Idx cur_str,
                                         Idx subexp_num, int type);
static bool build_trtable (const re_dfa_t *dfa, re_dfastate_t *state);
#ifdef RE_ENABLE_I18N
static int check_node_accept_bytes (const re_dfa_t *dfa, Idx node_idx,
                                    const re_string_t *input, Idx idx);
static unsigned int find_collation_sequence_value (const unsigned char *mbs,
                                                   size_t mbs_len);
#endif /* RE_ENABLE_I18N */
static Idx group_nodes_into_DFAstates (const re_dfa_t *dfa,
                                       const re_dfastate_t *state,
                                       re_node_set *states_node,
                                       bitset_t *states_ch);
static bool check_node_accept (const re_match_context_t *mctx,
                               const re_token_t *node, Idx idx);
static reg_errcode_t extend_buffers (re_match_context_t *mctx, int min_len);
/* Entry point for POSIX code.  */

/* regexec searches for a given pattern, specified by PREG, in the
   string STRING.

   If NMATCH is zero or REG_NOSUB was set in the cflags argument to
   'regcomp', we ignore PMATCH.  Otherwise, we assume PMATCH has at
   least NMATCH elements, and we set them to the offsets of the
   corresponding matched substrings.

   EFLAGS specifies "execution flags" which affect matching: if
   REG_NOTBOL is set, then ^ does not match at the beginning of the
   string; if REG_NOTEOL is set, then $ does not match at the end.

   Return 0 if a match is found, REG_NOMATCH if not, REG_BADPAT if
   EFLAGS is invalid.  */
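/* Illustrative usage sketch (not part of the original source): a minimal
   caller of the POSIX entry point documented above.  The pattern, subject
   string, and function name are arbitrary examples; the block is kept
   under "#if 0" so it is not compiled into the library.  */
#if 0
# include <regex.h>
# include <stdio.h>

static void
example_posix_regexec (void)
{
  regex_t re;
  regmatch_t m[2];

  /* Compile "a(b*)c" and search "xxabbbcyy"; m[0] spans the whole match
     and m[1] the parenthesized group.  */
  if (regcomp (&re, "a(b*)c", REG_EXTENDED) == 0)
    {
      if (regexec (&re, "xxabbbcyy", 2, m, 0) == 0)
        printf ("match at [%ld,%ld), group 1 at [%ld,%ld)\n",
                (long) m[0].rm_so, (long) m[0].rm_eo,
                (long) m[1].rm_so, (long) m[1].rm_eo);
      regfree (&re);
    }
}
#endif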
regexec (const regex_t *__restrict preg, const char *__restrict string,
         size_t nmatch, regmatch_t pmatch[_REGEX_NELTS (nmatch)], int eflags)
  re_dfa_t *dfa = preg->buffer;

  if (eflags & ~(REG_NOTBOL | REG_NOTEOL | REG_STARTEND))
  if (eflags & REG_STARTEND)
      start = pmatch[0].rm_so;
      length = pmatch[0].rm_eo;
      length = strlen (string);

  lock_lock (dfa->lock);
    err = re_search_internal (preg, string, length, start, length,
                              length, 0, NULL, eflags);
    err = re_search_internal (preg, string, length, start, length,
                              length, nmatch, pmatch, eflags);
  lock_unlock (dfa->lock);
  return err != REG_NOERROR;

libc_hidden_def (__regexec)

# include <shlib-compat.h>
versioned_symbol (libc, __regexec, regexec, GLIBC_2_3_4);

# if SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)
__typeof__ (__regexec) __compat_regexec;

attribute_compat_text_section
__compat_regexec (const regex_t *__restrict preg,
                  const char *__restrict string, size_t nmatch,
                  regmatch_t pmatch[_REGEX_NELTS (nmatch)], int eflags)
  return regexec (preg, string, nmatch, pmatch,
                  eflags & (REG_NOTBOL | REG_NOTEOL));
compat_symbol (libc, __compat_regexec, regexec, GLIBC_2_0);
/* Entry points for GNU code.  */

/* re_match, re_search, re_match_2, re_search_2

   The former two functions operate on STRING with length LENGTH,
   while the latter two operate on the concatenation of STRING1 and STRING2
   with lengths LENGTH1 and LENGTH2, respectively.

   re_match() matches the compiled pattern in BUFP against the string,
   starting at index START.

   re_search() first tries matching at index START, then it tries to match
   starting from index START + 1, and so on.  The last start position tried
   is START + RANGE.  (Thus RANGE = 0 forces re_search to operate the same
   way as re_match().)

   The parameter STOP of re_{match,search}_2 specifies that no match exceeding
   the first STOP characters of the concatenation of the strings should be
   considered.

   If REGS is not NULL, and BUFP->no_sub is not set, the offsets of the match
   and all groups are stored in REGS.  (For the "_2" variants, the offsets are
   computed relative to the concatenation, not relative to the individual
   strings.)

   On success, re_match* functions return the length of the match, re_search*
   return the position of the start of the match.  They return -1 on
   match failure, -2 on error.  */
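/* Illustrative usage sketch (not part of the original source): searching
   with the GNU interface described above.  The pattern and subject are
   arbitrary; the block is kept under "#if 0" so it is not compiled into
   the library.  REGS.start/REGS.end are allocated by the library here
   because the buffer starts out with REGS_UNALLOCATED.  */
#if 0
# include <regex.h>
# include <stdio.h>
# include <string.h>

static void
example_gnu_re_search (void)
{
  struct re_pattern_buffer buf;
  struct re_registers regs;
  const char *subject = "foo bar baz";
  regoff_t pos;

  memset (&buf, 0, sizeof buf);
  re_syntax_options = RE_SYNTAX_POSIX_EXTENDED;
  if (re_compile_pattern ("ba.", 3, &buf) != NULL)
    return;                     /* Non-NULL return is an error string.  */

  /* Try start positions 0, 1, ... up to 0 + RANGE.  */
  pos = re_search (&buf, subject, strlen (subject), 0, strlen (subject),
                   &regs);
  if (pos >= 0)
    printf ("match starts at %ld, group 0 = [%ld,%ld)\n",
            (long) pos, (long) regs.start[0], (long) regs.end[0]);
  regfree (&buf);
}
#endif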
re_match (struct re_pattern_buffer *bufp, const char *string, Idx length,
          Idx start, struct re_registers *regs)
  return re_search_stub (bufp, string, length, start, 0, length, regs, true);
weak_alias (__re_match, re_match)

re_search (struct re_pattern_buffer *bufp, const char *string, Idx length,
           Idx start, regoff_t range, struct re_registers *regs)
  return re_search_stub (bufp, string, length, start, range, length, regs,
                         false);
weak_alias (__re_search, re_search)

re_match_2 (struct re_pattern_buffer *bufp, const char *string1, Idx length1,
            const char *string2, Idx length2, Idx start,
            struct re_registers *regs, Idx stop)
  return re_search_2_stub (bufp, string1, length1, string2, length2,
                           start, 0, regs, stop, true);
weak_alias (__re_match_2, re_match_2)

re_search_2 (struct re_pattern_buffer *bufp, const char *string1, Idx length1,
             const char *string2, Idx length2, Idx start, regoff_t range,
             struct re_registers *regs, Idx stop)
  return re_search_2_stub (bufp, string1, length1, string2, length2,
                           start, range, regs, stop, false);
weak_alias (__re_search_2, re_search_2)
re_search_2_stub (struct re_pattern_buffer *bufp, const char *string1,
                  Idx length1, const char *string2, Idx length2, Idx start,
                  regoff_t range, struct re_registers *regs,
                  Idx stop, bool ret_len)
  if (__glibc_unlikely ((length1 < 0 || length2 < 0 || stop < 0
                         || INT_ADD_WRAPV (length1, length2, &len))))

  /* Concatenate the strings.  */
      s = re_malloc (char, len);
      if (__glibc_unlikely (s == NULL))
      memcpy (__mempcpy (s, string1, length1), string2, length2);
      memcpy (s, string1, length1);
      memcpy (s + length1, string2, length2);

  rval = re_search_stub (bufp, str, len, start, range, stop, regs,
                         ret_len);
/* The parameters have the same meaning as those of re_search.
   Additional parameters:
   If RET_LEN is true the length of the match is returned (re_match style);
   otherwise the position of the match is returned.  */

re_search_stub (struct re_pattern_buffer *bufp, const char *string, Idx length,
                Idx start, regoff_t range, Idx stop, struct re_registers *regs,
                bool ret_len)
  reg_errcode_t result;
  re_dfa_t *dfa = bufp->buffer;
  Idx last_start = start + range;

  /* Check for out-of-range.  */
  if (__glibc_unlikely (start < 0 || start > length))
  if (__glibc_unlikely (length < last_start
                        || (0 <= range && last_start < start)))
  else if (__glibc_unlikely (last_start < 0
                             || (range < 0 && start <= last_start)))

  lock_lock (dfa->lock);

  eflags |= (bufp->not_bol) ? REG_NOTBOL : 0;
  eflags |= (bufp->not_eol) ? REG_NOTEOL : 0;

  /* Compile fastmap if we haven't yet.  */
  if (start < last_start && bufp->fastmap != NULL && !bufp->fastmap_accurate)
    re_compile_fastmap (bufp);

  if (__glibc_unlikely (bufp->no_sub))

  /* We need at least 1 register.  */
  else if (__glibc_unlikely (bufp->regs_allocated == REGS_FIXED
                             && regs->num_regs <= bufp->re_nsub))
      nregs = regs->num_regs;
      if (__glibc_unlikely (nregs < 1))
          /* Nothing can be copied to regs.  */
    nregs = bufp->re_nsub + 1;
  pmatch = re_malloc (regmatch_t, nregs);
  if (__glibc_unlikely (pmatch == NULL))

  result = re_search_internal (bufp, string, length, start, last_start, stop,
                               nregs, pmatch, eflags);

  /* I hope we needn't fill their regs with -1's when no match was found.  */
  if (result != REG_NOERROR)
    rval = result == REG_NOMATCH ? -1 : -2;
  else if (regs != NULL)
      /* If caller wants register contents data back, copy them.  */
      bufp->regs_allocated = re_copy_regs (regs, pmatch, nregs,
                                           bufp->regs_allocated);
      if (__glibc_unlikely (bufp->regs_allocated == REGS_UNALLOCATED))

  if (__glibc_likely (rval == 0))
          DEBUG_ASSERT (pmatch[0].rm_so == start);
          rval = pmatch[0].rm_eo - start;
        rval = pmatch[0].rm_so;

  lock_unlock (dfa->lock);
re_copy_regs (struct re_registers *regs, regmatch_t *pmatch, Idx nregs,
              int regs_allocated)
  int rval = REGS_REALLOCATE;
  Idx need_regs = nregs + 1;
  /* We need one extra element beyond 'num_regs' for the '-1' marker GNU code
     uses.  */

  /* Have the register data arrays been allocated?  */
  if (regs_allocated == REGS_UNALLOCATED)
    { /* No.  So allocate them with malloc.  */
      regs->start = re_malloc (regoff_t, need_regs);
      if (__glibc_unlikely (regs->start == NULL))
        return REGS_UNALLOCATED;
      regs->end = re_malloc (regoff_t, need_regs);
      if (__glibc_unlikely (regs->end == NULL))
          re_free (regs->start);
          return REGS_UNALLOCATED;
      regs->num_regs = need_regs;
  else if (regs_allocated == REGS_REALLOCATE)
    { /* Yes.  If we need more elements than were already
         allocated, reallocate them.  If we need fewer, just
         leave it alone.  */
      if (__glibc_unlikely (need_regs > regs->num_regs))
          regoff_t *new_start = re_realloc (regs->start, regoff_t, need_regs);
          if (__glibc_unlikely (new_start == NULL))
            return REGS_UNALLOCATED;
          new_end = re_realloc (regs->end, regoff_t, need_regs);
          if (__glibc_unlikely (new_end == NULL))
              return REGS_UNALLOCATED;
          regs->start = new_start;
          regs->num_regs = need_regs;
      DEBUG_ASSERT (regs_allocated == REGS_FIXED);
      /* This function may not be called with REGS_FIXED and nregs too big.  */
      DEBUG_ASSERT (nregs <= regs->num_regs);

  for (i = 0; i < nregs; ++i)
      regs->start[i] = pmatch[i].rm_so;
      regs->end[i] = pmatch[i].rm_eo;
  for ( ; i < regs->num_regs; ++i)
    regs->start[i] = regs->end[i] = -1;
/* Set REGS to hold NUM_REGS registers, storing them in STARTS and
   ENDS.  Subsequent matches using PATTERN_BUFFER and REGS will use
   this memory for recording register information.  STARTS and ENDS
   must be allocated using the malloc library routine, and must each
   be at least NUM_REGS * sizeof (regoff_t) bytes long.

   If NUM_REGS == 0, then subsequent matches should allocate their own
   register data.

   Unless this function is called, the first search or match using
   PATTERN_BUFFER will allocate its own register data, without
   freeing the old data.  */
re_set_registers (struct re_pattern_buffer *bufp, struct re_registers *regs,
                  __re_size_t num_regs, regoff_t *starts, regoff_t *ends)
      bufp->regs_allocated = REGS_REALLOCATE;
      regs->num_regs = num_regs;
      regs->start = starts;
      bufp->regs_allocated = REGS_UNALLOCATED;
      regs->start = regs->end = NULL;
weak_alias (__re_set_registers, re_set_registers)
/* Entry points compatible with 4.2 BSD regex library.  We don't define
   them unless specifically requested.  */

#if defined _REGEX_RE_COMP || defined _LIBC
re_exec (const char *s)
  return 0 == regexec (&re_comp_buf, s, 0, NULL, 0);
#endif /* _REGEX_RE_COMP */
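/* Illustrative usage sketch (not part of the original source): the 4.2 BSD
   style interface guarded above.  It assumes a build where re_comp/re_exec
   are exposed (e.g. _REGEX_RE_COMP is defined); the block is kept under
   "#if 0" so it is not compiled into the library.  */
#if 0
# include <regex.h>
# include <stdio.h>

static void
example_bsd_re_exec (void)
{
  /* re_comp keeps one global compiled pattern and returns NULL on success;
     re_exec returns nonzero when the string matches that pattern.  */
  if (re_comp ("hel+o") == NULL && re_exec ("say hello"))
    puts ("matched");
}
#endif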
/* Internal entry point.  */

/* Searches for a compiled pattern PREG in the string STRING, whose
   length is LENGTH.  NMATCH, PMATCH, and EFLAGS have the same
   meaning as with regexec.  LAST_START is START + RANGE, where
   START and RANGE have the same meaning as with re_search.
   Return REG_NOERROR if we find a match, and REG_NOMATCH if not,
   otherwise return the error code.
   Note: We assume front end functions already check ranges.
   (0 <= LAST_START && LAST_START <= LENGTH)  */
__attribute_warn_unused_result__
re_search_internal (const regex_t *preg, const char *string, Idx length,
                    Idx start, Idx last_start, Idx stop, size_t nmatch,
                    regmatch_t pmatch[], int eflags)
  const re_dfa_t *dfa = preg->buffer;
  Idx left_lim, right_lim;
  bool fl_longest_match;
  re_match_context_t mctx = { .dfa = dfa };
  char *fastmap = ((preg->fastmap != NULL && preg->fastmap_accurate
                    && start != last_start && !preg->can_be_null)
                   ? preg->fastmap : NULL);
  RE_TRANSLATE_TYPE t = preg->translate;

  extra_nmatch = (nmatch > preg->re_nsub) ? nmatch - (preg->re_nsub + 1) : 0;
  nmatch -= extra_nmatch;

  /* Check if the DFA hasn't been compiled.  */
  if (__glibc_unlikely (preg->used == 0 || dfa->init_state == NULL
                        || dfa->init_state_word == NULL
                        || dfa->init_state_nl == NULL
                        || dfa->init_state_begbuf == NULL))

  /* We assume front-end functions already check them.  */
  DEBUG_ASSERT (0 <= last_start && last_start <= length);

  /* If initial states with non-begbuf contexts have no elements,
     the regex must be anchored.  If preg->newline_anchor is set,
     we'll never use init_state_nl, so do not check it.  */
  if (dfa->init_state->nodes.nelem == 0
      && dfa->init_state_word->nodes.nelem == 0
      && (dfa->init_state_nl->nodes.nelem == 0
          || !preg->newline_anchor))
      if (start != 0 && last_start != 0)
      start = last_start = 0;

  /* We must check the longest matching, if nmatch > 0.  */
  fl_longest_match = (nmatch != 0 || dfa->nbackref);

  err = re_string_allocate (&mctx.input, string, length, dfa->nodes_len + 1,
                            preg->translate, (preg->syntax & RE_ICASE) != 0,
  if (__glibc_unlikely (err != REG_NOERROR))
  mctx.input.stop = stop;
  mctx.input.raw_stop = stop;
  mctx.input.newline_anchor = preg->newline_anchor;

  err = match_ctx_init (&mctx, eflags, dfa->nbackref * 2);
  if (__glibc_unlikely (err != REG_NOERROR))

  /* We will log all the DFA states through which the DFA passes,
     if nmatch > 1, or this DFA has a "multibyte node", which is a
     back-reference or a node which can accept a multibyte character or
     a multi-character collating element.  */
  if (nmatch > 1 || dfa->has_mb_node)
      /* Avoid overflow.  */
      if (__glibc_unlikely ((MIN (IDX_MAX, SIZE_MAX / sizeof (re_dfastate_t *))
                             <= mctx.input.bufs_len)))
      mctx.state_log = re_malloc (re_dfastate_t *, mctx.input.bufs_len + 1);
      if (__glibc_unlikely (mctx.state_log == NULL))

  mctx.input.tip_context = (eflags & REG_NOTBOL) ? CONTEXT_BEGBUF
                           : CONTEXT_NEWLINE | CONTEXT_BEGBUF;

  /* Check incrementally whether the input string matches.  */
  incr = (last_start < start) ? -1 : 1;
  left_lim = (last_start < start) ? last_start : start;
  right_lim = (last_start < start) ? start : last_start;
  sb = dfa->mb_cur_max == 1;
          ? ((sb || !(preg->syntax & RE_ICASE || t) ? 4 : 0)
             | (start <= last_start ? 2 : 0)
             | (t != NULL ? 1 : 0))

  for (;; match_first += incr)
      if (match_first < left_lim || right_lim < match_first)

      /* Advance as rapidly as possible through the string, until we
         find a plausible place to start matching.  This may be done
         with varying efficiency, so there are various possibilities:
         only the most common of them are specialized, in order to
         save on code size.  We use a switch statement for speed.  */

          /* Fastmap with single-byte translation, match forward.  */
          while (__glibc_likely (match_first < right_lim)
                 && !fastmap[t[(unsigned char) string[match_first]]])
          goto forward_match_found_start_or_reached_end;

          /* Fastmap without translation, match forward.  */
          while (__glibc_likely (match_first < right_lim)
                 && !fastmap[(unsigned char) string[match_first]])

        forward_match_found_start_or_reached_end:
          if (__glibc_unlikely (match_first == right_lim))
              ch = match_first >= length
                   ? 0 : (unsigned char) string[match_first];
              if (!fastmap[t ? t[ch] : ch])

          /* Fastmap without multi-byte translation, match backwards.  */
          while (match_first >= left_lim)
              ch = match_first >= length
                   ? 0 : (unsigned char) string[match_first];
              if (fastmap[t ? t[ch] : ch])
          if (match_first < left_lim)

          /* In this case, we can't determine easily the current byte,
             since it might be a component byte of a multibyte
             character.  Then we use the constructed buffer instead.  */

              /* If MATCH_FIRST is out of the valid range, reconstruct the
                 buffers.  */
              __re_size_t offset = match_first - mctx.input.raw_mbs_idx;
              if (__glibc_unlikely (offset
                                    >= (__re_size_t) mctx.input.valid_raw_len))
                  err = re_string_reconstruct (&mctx.input, match_first,
                                               eflags);
                  if (__glibc_unlikely (err != REG_NOERROR))
                  offset = match_first - mctx.input.raw_mbs_idx;

              /* Use buffer byte if OFFSET is in buffer, otherwise '\0'.  */
              ch = (offset < mctx.input.valid_len
                    ? re_string_byte_at (&mctx.input, offset) : 0);
          if (match_first < left_lim || match_first > right_lim)

      /* Reconstruct the buffers so that the matcher can assume that
         the matching starts from the beginning of the buffer.  */
      err = re_string_reconstruct (&mctx.input, match_first, eflags);
      if (__glibc_unlikely (err != REG_NOERROR))

#ifdef RE_ENABLE_I18N
      /* Don't consider this char as a possible match start if it is part,
         yet isn't the head, of a multibyte character.  */
      if (!sb && !re_string_first_byte (&mctx.input, 0))
      /* It seems to be an appropriate one, then use the matcher.  */
      /* We assume that the matching starts from 0.  */
      mctx.state_log_top = mctx.nbkref_ents = mctx.max_mb_elem_len = 0;
      match_last = check_matching (&mctx, fl_longest_match,
                                   start <= last_start ? &match_first : NULL);
      if (match_last != -1)
          if (__glibc_unlikely (match_last == -2))
              mctx.match_last = match_last;
              if ((!preg->no_sub && nmatch > 1) || dfa->nbackref)
                  re_dfastate_t *pstate = mctx.state_log[match_last];
                  mctx.last_node = check_halt_state_context (&mctx, pstate,
                                                             match_last);
              if ((!preg->no_sub && nmatch > 1 && dfa->has_plural_match)
                  || dfa->nbackref)
                  err = prune_impossible_nodes (&mctx);
                  if (err == REG_NOERROR)
                  if (__glibc_unlikely (err != REG_NOMATCH))
              break; /* We found a match.  */
      match_ctx_clean (&mctx);

  DEBUG_ASSERT (match_last != -1);
  DEBUG_ASSERT (err == REG_NOERROR);

  /* Set pmatch[] if we need.  */
      /* Initialize registers.  */
      for (reg_idx = 1; reg_idx < nmatch; ++reg_idx)
        pmatch[reg_idx].rm_so = pmatch[reg_idx].rm_eo = -1;

      /* Set the points where matching start/end.  */
      pmatch[0].rm_eo = mctx.match_last;
      /* FIXME: This function should fail if mctx.match_last exceeds
         the maximum possible regoff_t value.  We need a new error
         code REG_OVERFLOW.  */

      if (!preg->no_sub && nmatch > 1)
          err = set_regs (preg, &mctx, nmatch, pmatch,
                          dfa->has_plural_match && dfa->nbackref > 0);
          if (__glibc_unlikely (err != REG_NOERROR))

      /* At last, add the offset to each register, since we slid
         the buffers so that we could assume that the matching starts
         from 0.  */
      for (reg_idx = 0; reg_idx < nmatch; ++reg_idx)
        if (pmatch[reg_idx].rm_so != -1)
#ifdef RE_ENABLE_I18N
            if (__glibc_unlikely (mctx.input.offsets_needed != 0))
                pmatch[reg_idx].rm_so =
                  (pmatch[reg_idx].rm_so == mctx.input.valid_len
                   ? mctx.input.valid_raw_len
                   : mctx.input.offsets[pmatch[reg_idx].rm_so]);
                pmatch[reg_idx].rm_eo =
                  (pmatch[reg_idx].rm_eo == mctx.input.valid_len
                   ? mctx.input.valid_raw_len
                   : mctx.input.offsets[pmatch[reg_idx].rm_eo]);
            DEBUG_ASSERT (mctx.input.offsets_needed == 0);
            pmatch[reg_idx].rm_so += match_first;
            pmatch[reg_idx].rm_eo += match_first;
      for (reg_idx = 0; reg_idx < extra_nmatch; ++reg_idx)
          pmatch[nmatch + reg_idx].rm_so = -1;
          pmatch[nmatch + reg_idx].rm_eo = -1;

        for (reg_idx = 0; reg_idx + 1 < nmatch; reg_idx++)
          if (dfa->subexp_map[reg_idx] != reg_idx)
              pmatch[reg_idx + 1].rm_so
                = pmatch[dfa->subexp_map[reg_idx] + 1].rm_so;
              pmatch[reg_idx + 1].rm_eo
                = pmatch[dfa->subexp_map[reg_idx] + 1].rm_eo;

  re_free (mctx.state_log);
  match_ctx_free (&mctx);
  re_string_destruct (&mctx.input);
__attribute_warn_unused_result__
prune_impossible_nodes (re_match_context_t *mctx)
  const re_dfa_t *const dfa = mctx->dfa;
  Idx halt_node, match_last;
  re_dfastate_t **sifted_states;
  re_dfastate_t **lim_states = NULL;
  re_sift_context_t sctx;
  DEBUG_ASSERT (mctx->state_log != NULL);
  match_last = mctx->match_last;
  halt_node = mctx->last_node;

  /* Avoid overflow.  */
  if (__glibc_unlikely (MIN (IDX_MAX, SIZE_MAX / sizeof (re_dfastate_t *))
                        <= match_last))

  sifted_states = re_malloc (re_dfastate_t *, match_last + 1);
  if (__glibc_unlikely (sifted_states == NULL))
      lim_states = re_malloc (re_dfastate_t *, match_last + 1);
      if (__glibc_unlikely (lim_states == NULL))
          memset (lim_states, '\0',
                  sizeof (re_dfastate_t *) * (match_last + 1));
          sift_ctx_init (&sctx, sifted_states, lim_states, halt_node,
                         match_last);
          ret = sift_states_backward (mctx, &sctx);
          re_node_set_free (&sctx.limits);
          if (__glibc_unlikely (ret != REG_NOERROR))
          if (sifted_states[0] != NULL || lim_states[0] != NULL)
        } while (mctx->state_log[match_last] == NULL
                 || !mctx->state_log[match_last]->halt);
      halt_node = check_halt_state_context (mctx,
                                            mctx->state_log[match_last],
                                            match_last);
      ret = merge_state_array (dfa, sifted_states, lim_states,
                               match_last + 1);
      re_free (lim_states);
      if (__glibc_unlikely (ret != REG_NOERROR))
      sift_ctx_init (&sctx, sifted_states, lim_states, halt_node, match_last);
      ret = sift_states_backward (mctx, &sctx);
      re_node_set_free (&sctx.limits);
      if (__glibc_unlikely (ret != REG_NOERROR))
      if (sifted_states[0] == NULL)
  re_free (mctx->state_log);
  mctx->state_log = sifted_states;
  sifted_states = NULL;
  mctx->last_node = halt_node;
  mctx->match_last = match_last;
  re_free (sifted_states);
  re_free (lim_states);
/* Acquire an initial state and return it.
   We must select an appropriate initial state depending on the context,
   since initial states may have constraints like "\<", "^", etc..  */

static inline re_dfastate_t *
__attribute__ ((always_inline))
acquire_init_state_context (reg_errcode_t *err, const re_match_context_t *mctx,
                            Idx idx)
  const re_dfa_t *const dfa = mctx->dfa;
  if (dfa->init_state->has_constraint)
      unsigned int context;
      context = re_string_context_at (&mctx->input, idx - 1, mctx->eflags);
      if (IS_WORD_CONTEXT (context))
        return dfa->init_state_word;
      else if (IS_ORDINARY_CONTEXT (context))
        return dfa->init_state;
      else if (IS_BEGBUF_CONTEXT (context) && IS_NEWLINE_CONTEXT (context))
        return dfa->init_state_begbuf;
      else if (IS_NEWLINE_CONTEXT (context))
        return dfa->init_state_nl;
      else if (IS_BEGBUF_CONTEXT (context))
          /* It is a relatively rare case, so calculate on demand.  */
          return re_acquire_state_context (err, dfa,
                                           dfa->init_state->entrance_nodes,
                                           context);
        /* Must not happen?  */
        return dfa->init_state;
    return dfa->init_state;
/* Check whether the regular expression matches the input string INPUT or not,
   and return the index where the matching ends.  Return -1 if
   there is no match, and return -2 in case of an error.
   FL_LONGEST_MATCH means we want the POSIX longest matching.
   If P_MATCH_FIRST is not NULL, and the match fails, it is set to the
   next place where we may want to try matching.
   Note that the matcher assumes that the matching starts from the current
   index of the buffer.  */

__attribute_warn_unused_result__
check_matching (re_match_context_t *mctx, bool fl_longest_match,
                Idx *p_match_first)
  const re_dfa_t *const dfa = mctx->dfa;
  Idx match_last = -1;
  Idx cur_str_idx = re_string_cur_idx (&mctx->input);
  re_dfastate_t *cur_state;
  bool at_init_state = p_match_first != NULL;
  Idx next_start_idx = cur_str_idx;

  cur_state = acquire_init_state_context (&err, mctx, cur_str_idx);
  /* An initial state must not be NULL (invalid).  */
  if (__glibc_unlikely (cur_state == NULL))
      DEBUG_ASSERT (err == REG_ESPACE);

  if (mctx->state_log != NULL)
      mctx->state_log[cur_str_idx] = cur_state;

      /* Check OP_OPEN_SUBEXP in the initial state in case that we use them
         later.  E.g. Processing back references.  */
      if (__glibc_unlikely (dfa->nbackref))
          at_init_state = false;
          err = check_subexp_matching_top (mctx, &cur_state->nodes, 0);
          if (__glibc_unlikely (err != REG_NOERROR))
          if (cur_state->has_backref)
              err = transit_state_bkref (mctx, &cur_state->nodes);
              if (__glibc_unlikely (err != REG_NOERROR))

  /* If the RE accepts NULL string.  */
  if (__glibc_unlikely (cur_state->halt))
      if (!cur_state->has_constraint
          || check_halt_state_context (mctx, cur_state, cur_str_idx))
          if (!fl_longest_match)
            match_last = cur_str_idx;

  while (!re_string_eoi (&mctx->input))
      re_dfastate_t *old_state = cur_state;
      Idx next_char_idx = re_string_cur_idx (&mctx->input) + 1;

      if ((__glibc_unlikely (next_char_idx >= mctx->input.bufs_len)
           && mctx->input.bufs_len < mctx->input.len)
          || (__glibc_unlikely (next_char_idx >= mctx->input.valid_len)
              && mctx->input.valid_len < mctx->input.len))
          err = extend_buffers (mctx, next_char_idx + 1);
          if (__glibc_unlikely (err != REG_NOERROR))
              DEBUG_ASSERT (err == REG_ESPACE);

      cur_state = transit_state (&err, mctx, cur_state);
      if (mctx->state_log != NULL)
        cur_state = merge_state_with_log (&err, mctx, cur_state);

      if (cur_state == NULL)
          /* Reached the invalid state or an error.  Try to recover a valid
             state using the state log, if available and if we have not
             already found a valid (even if not the longest) match.  */
          if (__glibc_unlikely (err != REG_NOERROR))
          if (mctx->state_log == NULL
              || (match && !fl_longest_match)
              || (cur_state = find_recover_state (&err, mctx)) == NULL)

      if (__glibc_unlikely (at_init_state))
          if (old_state == cur_state)
            next_start_idx = next_char_idx;
            at_init_state = false;

      if (cur_state->halt)
          /* Reached a halt state.
             Check the halt state can satisfy the current context.  */
          if (!cur_state->has_constraint
              || check_halt_state_context (mctx, cur_state,
                                           re_string_cur_idx (&mctx->input)))
              /* We found an appropriate halt state.  */
              match_last = re_string_cur_idx (&mctx->input);

              /* We found a match, do not modify match_first below.  */
              p_match_first = NULL;
              if (!fl_longest_match)

    *p_match_first += next_start_idx;
/* Check whether NODE matches the current context.  */

check_halt_node_context (const re_dfa_t *dfa, Idx node, unsigned int context)
  re_token_type_t type = dfa->nodes[node].type;
  unsigned int constraint = dfa->nodes[node].constraint;
  if (type != END_OF_RE)
  if (NOT_SATISFY_NEXT_CONSTRAINT (constraint, context))

/* Check whether the halt state STATE matches the current context.
   Return 0 if it does not match; if the node that STATE has is a halt node
   and matches the context, return the node.  */

check_halt_state_context (const re_match_context_t *mctx,
                          const re_dfastate_t *state, Idx idx)
  unsigned int context;
  DEBUG_ASSERT (state->halt);
  context = re_string_context_at (&mctx->input, idx, mctx->eflags);
  for (i = 0; i < state->nodes.nelem; ++i)
    if (check_halt_node_context (mctx->dfa, state->nodes.elems[i], context))
      return state->nodes.elems[i];
/* Compute the next node to which the "NFA" transits from NODE (the "NFA" is
   the NFA corresponding to the DFA).
   Return the destination node, and update EPS_VIA_NODES;
   return -1 on match failure, -2 on error.  */

proceed_next_node (const re_match_context_t *mctx, Idx nregs, regmatch_t *regs,
                   regmatch_t *prevregs,
                   Idx *pidx, Idx node, re_node_set *eps_via_nodes,
                   struct re_fail_stack_t *fs)
  const re_dfa_t *const dfa = mctx->dfa;
  if (IS_EPSILON_NODE (dfa->nodes[node].type))
      re_node_set *cur_nodes = &mctx->state_log[*pidx]->nodes;
      re_node_set *edests = &dfa->edests[node];

      if (! re_node_set_contains (eps_via_nodes, node))
          bool ok = re_node_set_insert (eps_via_nodes, node);
          if (__glibc_unlikely (! ok))

          /* Pick a valid destination, or return -1 if none is found.  */
          for (Idx i = 0; i < edests->nelem; i++)
              Idx candidate = edests->elems[i];
              if (!re_node_set_contains (cur_nodes, candidate))
              if (dest_node == -1)
                dest_node = candidate;

              /* In order to avoid infinite loop like "(a*)*", return the second
                 epsilon-transition if the first was already considered.  */
              if (re_node_set_contains (eps_via_nodes, dest_node))

              /* Otherwise, push the second epsilon-transition on the fail stack.  */
                       && push_fail_stack (fs, *pidx, candidate, nregs, regs,
                                           prevregs, eps_via_nodes))

              /* We know we are going to exit.  */

      re_token_type_t type = dfa->nodes[node].type;
#ifdef RE_ENABLE_I18N
      if (dfa->nodes[node].accept_mb)
        naccepted = check_node_accept_bytes (dfa, node, &mctx->input, *pidx);
#endif /* RE_ENABLE_I18N */
      if (type == OP_BACK_REF)
          Idx subexp_idx = dfa->nodes[node].opr.idx + 1;
          if (subexp_idx < nregs)
            naccepted = regs[subexp_idx].rm_eo - regs[subexp_idx].rm_so;
              if (subexp_idx >= nregs
                  || regs[subexp_idx].rm_so == -1
                  || regs[subexp_idx].rm_eo == -1)
                  char *buf = (char *) re_string_get_buffer (&mctx->input);
                  if (mctx->input.valid_len - *pidx < naccepted
                      || (memcmp (buf + regs[subexp_idx].rm_so, buf + *pidx,
                                  naccepted) != 0))
              bool ok = re_node_set_insert (eps_via_nodes, node);
              if (__glibc_unlikely (! ok))
              dest_node = dfa->edests[node].elems[0];
              if (re_node_set_contains (&mctx->state_log[*pidx]->nodes,
                                        dest_node))
          || check_node_accept (mctx, dfa->nodes + node, *pidx))
          Idx dest_node = dfa->nexts[node];
          *pidx = (naccepted == 0) ? *pidx + 1 : *pidx + naccepted;
          if (fs && (*pidx > mctx->match_last || mctx->state_log[*pidx] == NULL
                     || !re_node_set_contains (&mctx->state_log[*pidx]->nodes,
                                               dest_node)))
          re_node_set_empty (eps_via_nodes);
static reg_errcode_t
__attribute_warn_unused_result__
push_fail_stack (struct re_fail_stack_t *fs, Idx str_idx, Idx dest_node,
                 Idx nregs, regmatch_t *regs, regmatch_t *prevregs,
                 re_node_set *eps_via_nodes)
  Idx num = fs->num++;
  if (fs->num == fs->alloc)
      struct re_fail_stack_ent_t *new_array;
      new_array = re_realloc (fs->stack, struct re_fail_stack_ent_t,
      if (new_array == NULL)
      fs->stack = new_array;
  fs->stack[num].idx = str_idx;
  fs->stack[num].node = dest_node;
  fs->stack[num].regs = re_malloc (regmatch_t, 2 * nregs);
  if (fs->stack[num].regs == NULL)
  memcpy (fs->stack[num].regs, regs, sizeof (regmatch_t) * nregs);
  memcpy (fs->stack[num].regs + nregs, prevregs, sizeof (regmatch_t) * nregs);
  err = re_node_set_init_copy (&fs->stack[num].eps_via_nodes, eps_via_nodes);

pop_fail_stack (struct re_fail_stack_t *fs, Idx *pidx, Idx nregs,
                regmatch_t *regs, regmatch_t *prevregs,
                re_node_set *eps_via_nodes)
  if (fs == NULL || fs->num == 0)
  Idx num = --fs->num;
  *pidx = fs->stack[num].idx;
  memcpy (regs, fs->stack[num].regs, sizeof (regmatch_t) * nregs);
  memcpy (prevregs, fs->stack[num].regs + nregs, sizeof (regmatch_t) * nregs);
  re_node_set_free (eps_via_nodes);
  re_free (fs->stack[num].regs);
  *eps_via_nodes = fs->stack[num].eps_via_nodes;
  DEBUG_ASSERT (0 <= fs->stack[num].node);
  return fs->stack[num].node;
#define DYNARRAY_STRUCT  regmatch_list
#define DYNARRAY_ELEMENT regmatch_t
#define DYNARRAY_PREFIX  regmatch_list_
#include <malloc/dynarray-skeleton.c>

/* Set the positions where the subexpressions start/end in the registers.
   Note: We assume that pmatch[0] is already set, and
   pmatch[i].rm_so == pmatch[i].rm_eo == -1 for 0 < i < nmatch.  */
static reg_errcode_t
__attribute_warn_unused_result__
set_regs (const regex_t *preg, const re_match_context_t *mctx, size_t nmatch,
          regmatch_t *pmatch, bool fl_backtrack)
  const re_dfa_t *dfa = preg->buffer;
  re_node_set eps_via_nodes;
  struct re_fail_stack_t *fs;
  struct re_fail_stack_t fs_body = { 0, 2, NULL };
  struct regmatch_list prev_match;
  regmatch_list_init (&prev_match);

  DEBUG_ASSERT (nmatch > 1);
  DEBUG_ASSERT (mctx->state_log != NULL);
      fs->stack = re_malloc (struct re_fail_stack_ent_t, fs->alloc);
      if (fs->stack == NULL)
  cur_node = dfa->init_node;
  re_node_set_init_empty (&eps_via_nodes);

  if (!regmatch_list_resize (&prev_match, nmatch))
      regmatch_list_free (&prev_match);
      free_fail_stack_return (fs);
  regmatch_t *prev_idx_match = regmatch_list_begin (&prev_match);
  memcpy (prev_idx_match, pmatch, sizeof (regmatch_t) * nmatch);

  for (idx = pmatch[0].rm_so; idx <= pmatch[0].rm_eo;)
      update_regs (dfa, pmatch, prev_idx_match, cur_node, idx, nmatch);

      if ((idx == pmatch[0].rm_eo && cur_node == mctx->last_node)
          || (fs && re_node_set_contains (&eps_via_nodes, cur_node)))
              for (reg_idx = 0; reg_idx < nmatch; ++reg_idx)
                if (pmatch[reg_idx].rm_so > -1 && pmatch[reg_idx].rm_eo == -1)
              cur_node = pop_fail_stack (fs, &idx, nmatch, pmatch,
                                         prev_idx_match, &eps_via_nodes);
              re_node_set_free (&eps_via_nodes);
              regmatch_list_free (&prev_match);
              return free_fail_stack_return (fs);

      /* Proceed to next node.  */
      cur_node = proceed_next_node (mctx, nmatch, pmatch, prev_idx_match,
                                    &idx, cur_node,
                                    &eps_via_nodes, fs);

      if (__glibc_unlikely (cur_node < 0))
          if (__glibc_unlikely (cur_node == -2))
              re_node_set_free (&eps_via_nodes);
              regmatch_list_free (&prev_match);
              free_fail_stack_return (fs);
          cur_node = pop_fail_stack (fs, &idx, nmatch, pmatch,
                                     prev_idx_match, &eps_via_nodes);
              re_node_set_free (&eps_via_nodes);
              regmatch_list_free (&prev_match);
              free_fail_stack_return (fs);

  re_node_set_free (&eps_via_nodes);
  regmatch_list_free (&prev_match);
  return free_fail_stack_return (fs);
static reg_errcode_t
free_fail_stack_return (struct re_fail_stack_t *fs)
      for (fs_idx = 0; fs_idx < fs->num; ++fs_idx)
          re_node_set_free (&fs->stack[fs_idx].eps_via_nodes);
          re_free (fs->stack[fs_idx].regs);
      re_free (fs->stack);
update_regs (const re_dfa_t *dfa, regmatch_t *pmatch,
             regmatch_t *prev_idx_match, Idx cur_node, Idx cur_idx, Idx nmatch)
  int type = dfa->nodes[cur_node].type;
  if (type == OP_OPEN_SUBEXP)
      Idx reg_num = dfa->nodes[cur_node].opr.idx + 1;

      /* We are at the first node of this sub expression.  */
      if (reg_num < nmatch)
          pmatch[reg_num].rm_so = cur_idx;
          pmatch[reg_num].rm_eo = -1;
  else if (type == OP_CLOSE_SUBEXP)
      /* We are at the last node of this sub expression.  */
      Idx reg_num = dfa->nodes[cur_node].opr.idx + 1;
      if (reg_num < nmatch)
          if (pmatch[reg_num].rm_so < cur_idx)
              pmatch[reg_num].rm_eo = cur_idx;
              /* This is a non-empty match or we are not inside an optional
                 subexpression.  Accept this right away.  */
              memcpy (prev_idx_match, pmatch, sizeof (regmatch_t) * nmatch);
              if (dfa->nodes[cur_node].opt_subexp
                  && prev_idx_match[reg_num].rm_so != -1)
                /* We transited through an empty match for an optional
                   subexpression, like (a?)*, and this is not the subexp's
                   first match.  Copy back the old content of the registers
                   so that matches of an inner subexpression are undone as
                   well, like in ((a?))*.  */
                memcpy (pmatch, prev_idx_match, sizeof (regmatch_t) * nmatch);

                /* We completed a subexpression, but it may be part of
                   an optional one, so do not update PREV_IDX_MATCH.  */
                pmatch[reg_num].rm_eo = cur_idx;
/* This function checks the STATE_LOG from SCTX->last_str_idx down to 0
   and sifts the nodes in each state according to the following rules.
   The updated state_log will be written to STATE_LOG.

   Rules: We throw away the node 'a' in the STATE_LOG[STR_IDX] if...
     1. When STR_IDX == MATCH_LAST (the last index in the state_log):
        If 'a' isn't the LAST_NODE and 'a' can't epsilon transit to
        the LAST_NODE, we throw away the node 'a'.
     2. When 0 <= STR_IDX < MATCH_LAST and 'a' accepts
        string 's' and transits to 'b':
        i. If 'b' isn't in the STATE_LOG[STR_IDX+strlen('s')], we throw
           away the node 'a'.
        ii. If 'b' is in the STATE_LOG[STR_IDX+strlen('s')] but 'b' is
            thrown away, we throw away the node 'a'.
     3. When 0 <= STR_IDX < MATCH_LAST and 'a' epsilon transits to 'b':
        i. If 'b' isn't in the STATE_LOG[STR_IDX], we throw away the
           node 'a'.
        ii. If 'b' is in the STATE_LOG[STR_IDX] but 'b' is thrown away,
            we throw away the node 'a'.  */
#define STATE_NODE_CONTAINS(state,node) \
  ((state) != NULL && re_node_set_contains (&(state)->nodes, node))

static reg_errcode_t
sift_states_backward (const re_match_context_t *mctx, re_sift_context_t *sctx)
  Idx str_idx = sctx->last_str_idx;
  re_node_set cur_dest;

  DEBUG_ASSERT (mctx->state_log != NULL && mctx->state_log[str_idx] != NULL);

  /* Build sifted state_log[str_idx].  It has the nodes which can epsilon
     transit to the last_node and the last_node itself.  */
  err = re_node_set_init_1 (&cur_dest, sctx->last_node);
  if (__glibc_unlikely (err != REG_NOERROR))
  err = update_cur_sifted_state (mctx, sctx, str_idx, &cur_dest);
  if (__glibc_unlikely (err != REG_NOERROR))

  /* Then check each state in the state_log.  */
      /* Update counters.  */
      null_cnt = (sctx->sifted_states[str_idx] == NULL) ? null_cnt + 1 : 0;
      if (null_cnt > mctx->max_mb_elem_len)
          memset (sctx->sifted_states, '\0',
                  sizeof (re_dfastate_t *) * str_idx);
          re_node_set_free (&cur_dest);
        re_node_set_empty (&cur_dest);

      if (mctx->state_log[str_idx])
          err = build_sifted_states (mctx, sctx, str_idx, &cur_dest);
          if (__glibc_unlikely (err != REG_NOERROR))

      /* Add all the nodes which satisfy the following conditions:
         - It can epsilon transit to a node in CUR_DEST.
         And update state_log.  */
      err = update_cur_sifted_state (mctx, sctx, str_idx, &cur_dest);
      if (__glibc_unlikely (err != REG_NOERROR))

  re_node_set_free (&cur_dest);
static reg_errcode_t
__attribute_warn_unused_result__
build_sifted_states (const re_match_context_t *mctx, re_sift_context_t *sctx,
                     Idx str_idx, re_node_set *cur_dest)
  const re_dfa_t *const dfa = mctx->dfa;
  const re_node_set *cur_src = &mctx->state_log[str_idx]->non_eps_nodes;

  /* Then build the next sifted state.
     We build the next sifted state on 'cur_dest', and update
     'sifted_states[str_idx]' with 'cur_dest'.
     'cur_dest' is the sifted state from 'state_log[str_idx + 1]'.
     'cur_src' points the node_set of the old 'state_log[str_idx]'
     (with the epsilon nodes pre-filtered out).  */
  for (i = 0; i < cur_src->nelem; i++)
      Idx prev_node = cur_src->elems[i];
      DEBUG_ASSERT (!IS_EPSILON_NODE (dfa->nodes[prev_node].type));

#ifdef RE_ENABLE_I18N
      /* If the node may accept "multi byte".  */
      if (dfa->nodes[prev_node].accept_mb)
        naccepted = sift_states_iter_mb (mctx, sctx, prev_node,
                                         str_idx, sctx->last_str_idx);
#endif /* RE_ENABLE_I18N */

      /* We don't check backreferences here.
         See update_cur_sifted_state().  */
          && check_node_accept (mctx, dfa->nodes + prev_node, str_idx)
          && STATE_NODE_CONTAINS (sctx->sifted_states[str_idx + 1],
                                  dfa->nexts[prev_node]))
          if (sctx->limits.nelem)
              Idx to_idx = str_idx + naccepted;
              if (check_dst_limits (mctx, &sctx->limits,
                                    dfa->nexts[prev_node], to_idx,
                                    prev_node, str_idx))
          ok = re_node_set_insert (cur_dest, prev_node);
          if (__glibc_unlikely (! ok))
/* Helper functions.  */

static reg_errcode_t
clean_state_log_if_needed (re_match_context_t *mctx, Idx next_state_log_idx)
  Idx top = mctx->state_log_top;

  if ((next_state_log_idx >= mctx->input.bufs_len
       && mctx->input.bufs_len < mctx->input.len)
      || (next_state_log_idx >= mctx->input.valid_len
          && mctx->input.valid_len < mctx->input.len))
      err = extend_buffers (mctx, next_state_log_idx + 1);
      if (__glibc_unlikely (err != REG_NOERROR))

  if (top < next_state_log_idx)
      memset (mctx->state_log + top + 1, '\0',
              sizeof (re_dfastate_t *) * (next_state_log_idx - top));
      mctx->state_log_top = next_state_log_idx;
static reg_errcode_t
merge_state_array (const re_dfa_t *dfa, re_dfastate_t **dst,
                   re_dfastate_t **src, Idx num)
  for (st_idx = 0; st_idx < num; ++st_idx)
      if (dst[st_idx] == NULL)
        dst[st_idx] = src[st_idx];
      else if (src[st_idx] != NULL)
          re_node_set merged_set;
          err = re_node_set_init_union (&merged_set, &dst[st_idx]->nodes,
                                        &src[st_idx]->nodes);
          if (__glibc_unlikely (err != REG_NOERROR))
          dst[st_idx] = re_acquire_state (&err, dfa, &merged_set);
          re_node_set_free (&merged_set);
          if (__glibc_unlikely (err != REG_NOERROR))
static reg_errcode_t
update_cur_sifted_state (const re_match_context_t *mctx,
                         re_sift_context_t *sctx, Idx str_idx,
                         re_node_set *dest_nodes)
  const re_dfa_t *const dfa = mctx->dfa;
  reg_errcode_t err = REG_NOERROR;
  const re_node_set *candidates;
  candidates = ((mctx->state_log[str_idx] == NULL) ? NULL
                : &mctx->state_log[str_idx]->nodes);

  if (dest_nodes->nelem == 0)
    sctx->sifted_states[str_idx] = NULL;
      /* At first, add the nodes which can epsilon transit to a node in
         DEST_NODES.  */
      err = add_epsilon_src_nodes (dfa, dest_nodes, candidates);
      if (__glibc_unlikely (err != REG_NOERROR))

      /* Then, check the limitations in the current sift_context.  */
      if (sctx->limits.nelem)
          err = check_subexp_limits (dfa, dest_nodes, candidates, &sctx->limits,
                                     mctx->bkref_ents, str_idx);
          if (__glibc_unlikely (err != REG_NOERROR))

  sctx->sifted_states[str_idx] = re_acquire_state (&err, dfa, dest_nodes);
  if (__glibc_unlikely (err != REG_NOERROR))

  if (candidates && mctx->state_log[str_idx]->has_backref)
      err = sift_states_bkref (mctx, sctx, str_idx, candidates);
      if (__glibc_unlikely (err != REG_NOERROR))
static reg_errcode_t
__attribute_warn_unused_result__
add_epsilon_src_nodes (const re_dfa_t *dfa, re_node_set *dest_nodes,
                       const re_node_set *candidates)
  reg_errcode_t err = REG_NOERROR;

  re_dfastate_t *state = re_acquire_state (&err, dfa, dest_nodes);
  if (__glibc_unlikely (err != REG_NOERROR))

  if (!state->inveclosure.alloc)
      err = re_node_set_alloc (&state->inveclosure, dest_nodes->nelem);
      if (__glibc_unlikely (err != REG_NOERROR))
      for (i = 0; i < dest_nodes->nelem; i++)
          err = re_node_set_merge (&state->inveclosure,
                                   dfa->inveclosures + dest_nodes->elems[i]);
          if (__glibc_unlikely (err != REG_NOERROR))
  return re_node_set_add_intersect (dest_nodes, candidates,
                                    &state->inveclosure);
static reg_errcode_t
sub_epsilon_src_nodes (const re_dfa_t *dfa, Idx node, re_node_set *dest_nodes,
                       const re_node_set *candidates)
  re_node_set *inv_eclosure = dfa->inveclosures + node;
  re_node_set except_nodes;
  re_node_set_init_empty (&except_nodes);
  for (ecl_idx = 0; ecl_idx < inv_eclosure->nelem; ++ecl_idx)
      Idx cur_node = inv_eclosure->elems[ecl_idx];
      if (cur_node == node)
      if (IS_EPSILON_NODE (dfa->nodes[cur_node].type))
          Idx edst1 = dfa->edests[cur_node].elems[0];
          Idx edst2 = ((dfa->edests[cur_node].nelem > 1)
                       ? dfa->edests[cur_node].elems[1] : -1);
          if ((!re_node_set_contains (inv_eclosure, edst1)
               && re_node_set_contains (dest_nodes, edst1))
                  && !re_node_set_contains (inv_eclosure, edst2)
                  && re_node_set_contains (dest_nodes, edst2)))
              err = re_node_set_add_intersect (&except_nodes, candidates,
                                               dfa->inveclosures + cur_node);
              if (__glibc_unlikely (err != REG_NOERROR))
                  re_node_set_free (&except_nodes);
  for (ecl_idx = 0; ecl_idx < inv_eclosure->nelem; ++ecl_idx)
      Idx cur_node = inv_eclosure->elems[ecl_idx];
      if (!re_node_set_contains (&except_nodes, cur_node))
          Idx idx = re_node_set_contains (dest_nodes, cur_node) - 1;
          re_node_set_remove_at (dest_nodes, idx);
  re_node_set_free (&except_nodes);
check_dst_limits (const re_match_context_t *mctx, const re_node_set *limits,
                  Idx dst_node, Idx dst_idx, Idx src_node, Idx src_idx)
  const re_dfa_t *const dfa = mctx->dfa;
  Idx lim_idx, src_pos, dst_pos;

  Idx dst_bkref_idx = search_cur_bkref_entry (mctx, dst_idx);
  Idx src_bkref_idx = search_cur_bkref_entry (mctx, src_idx);
  for (lim_idx = 0; lim_idx < limits->nelem; ++lim_idx)
      struct re_backref_cache_entry *ent;
      ent = mctx->bkref_ents + limits->elems[lim_idx];
      subexp_idx = dfa->nodes[ent->node].opr.idx;

      dst_pos = check_dst_limits_calc_pos (mctx, limits->elems[lim_idx],
                                           subexp_idx, dst_node, dst_idx,
                                           dst_bkref_idx);
      src_pos = check_dst_limits_calc_pos (mctx, limits->elems[lim_idx],
                                           subexp_idx, src_node, src_idx,
                                           src_bkref_idx);

      /* In case of:
         <src> <dst> ( <subexp> )
         ( <subexp> ) <src> <dst>
         ( <subexp1> <src> <subexp2> <dst> <subexp3> )  */
      if (src_pos == dst_pos)
        continue; /* This is an unrelated limitation.  */
check_dst_limits_calc_pos_1 (const re_match_context_t *mctx, int boundaries,
                             Idx subexp_idx, Idx from_node, Idx bkref_idx)
  const re_dfa_t *const dfa = mctx->dfa;
  const re_node_set *eclosures = dfa->eclosures + from_node;

  /* Else, we are on the boundary: examine the nodes on the epsilon
     closure.  */
  for (node_idx = 0; node_idx < eclosures->nelem; ++node_idx)
      Idx node = eclosures->elems[node_idx];
      switch (dfa->nodes[node].type)
          if (bkref_idx != -1)
              struct re_backref_cache_entry *ent = mctx->bkref_ents + bkref_idx;
                  if (ent->node != node)
                  if (subexp_idx < BITSET_WORD_BITS
                      && !(ent->eps_reachable_subexps_map
                           & ((bitset_word_t) 1 << subexp_idx)))

                  /* Recurse trying to reach the OP_OPEN_SUBEXP and
                     OP_CLOSE_SUBEXP cases below.  But, if the
                     destination node is the same node as the source
                     node, don't recurse because it would cause an
                     infinite loop: a regex that exhibits this behavior
                     is ()\1*\1*  */
                  dst = dfa->edests[node].elems[0];
                  if (dst == from_node)
                      else /* if (boundaries & 2) */

                  cpos =
                    check_dst_limits_calc_pos_1 (mctx, boundaries, subexp_idx,
                                                 dst, bkref_idx);
                  if (cpos == -1 /* && (boundaries & 1) */)
                  if (cpos == 0 && (boundaries & 2))
                  if (subexp_idx < BITSET_WORD_BITS)
                    ent->eps_reachable_subexps_map
                      &= ~((bitset_word_t) 1 << subexp_idx);
              while (ent++->more);

        case OP_OPEN_SUBEXP:
          if ((boundaries & 1) && subexp_idx == dfa->nodes[node].opr.idx)
        case OP_CLOSE_SUBEXP:
          if ((boundaries & 2) && subexp_idx == dfa->nodes[node].opr.idx)

  return (boundaries & 2) ? 1 : 0;
check_dst_limits_calc_pos (const re_match_context_t *mctx, Idx limit,
                           Idx subexp_idx, Idx from_node, Idx str_idx,
                           Idx bkref_idx)
  struct re_backref_cache_entry *lim = mctx->bkref_ents + limit;

  /* If we are outside the range of the subexpression, return -1 or 1.  */
  if (str_idx < lim->subexp_from)
  if (lim->subexp_to < str_idx)

  /* If we are within the subexpression, return 0.  */
  boundaries = (str_idx == lim->subexp_from);
  boundaries |= (str_idx == lim->subexp_to) << 1;
  if (boundaries == 0)

  /* Else, examine epsilon closure.  */
  return check_dst_limits_calc_pos_1 (mctx, boundaries, subexp_idx,
                                      from_node, bkref_idx);
1990 /* Check the limitations of sub expressions LIMITS, and remove the nodes
1991 which are against limitations from DEST_NODES. */
1993 static reg_errcode_t
1994 check_subexp_limits (const re_dfa_t
*dfa
, re_node_set
*dest_nodes
,
1995 const re_node_set
*candidates
, re_node_set
*limits
,
1996 struct re_backref_cache_entry
*bkref_ents
, Idx str_idx
)
1999 Idx node_idx
, lim_idx
;
2001 for (lim_idx
= 0; lim_idx
< limits
->nelem
; ++lim_idx
)
2004 struct re_backref_cache_entry
*ent
;
2005 ent
= bkref_ents
+ limits
->elems
[lim_idx
];
2007 if (str_idx
<= ent
->subexp_from
|| ent
->str_idx
< str_idx
)
2008 continue; /* This is unrelated limitation. */
2010 subexp_idx
= dfa
->nodes
[ent
->node
].opr
.idx
;
2011 if (ent
->subexp_to
== str_idx
)
2015 for (node_idx
= 0; node_idx
< dest_nodes
->nelem
; ++node_idx
)
2017 Idx node
= dest_nodes
->elems
[node_idx
];
2018 re_token_type_t type
= dfa
->nodes
[node
].type
;
2019 if (type
== OP_OPEN_SUBEXP
2020 && subexp_idx
== dfa
->nodes
[node
].opr
.idx
)
2022 else if (type
== OP_CLOSE_SUBEXP
2023 && subexp_idx
== dfa
->nodes
[node
].opr
.idx
)
2027 /* Check the limitation of the open subexpression. */
2028 /* Note that (ent->subexp_to = str_idx != ent->subexp_from). */
2031 err
= sub_epsilon_src_nodes (dfa
, ops_node
, dest_nodes
,
2033 if (__glibc_unlikely (err
!= REG_NOERROR
))
2037 /* Check the limitation of the close subexpression. */
2039 for (node_idx
= 0; node_idx
< dest_nodes
->nelem
; ++node_idx
)
2041 Idx node
= dest_nodes
->elems
[node_idx
];
2042 if (!re_node_set_contains (dfa
->inveclosures
+ node
,
2044 && !re_node_set_contains (dfa
->eclosures
+ node
,
2047 /* It is against this limitation.
2048 Remove it form the current sifted state. */
2049 err
= sub_epsilon_src_nodes (dfa
, node
, dest_nodes
,
2051 if (__glibc_unlikely (err
!= REG_NOERROR
))
2057 else /* (ent->subexp_to != str_idx) */
2059 for (node_idx
= 0; node_idx
< dest_nodes
->nelem
; ++node_idx
)
2061 Idx node
= dest_nodes
->elems
[node_idx
];
2062 re_token_type_t type
= dfa
->nodes
[node
].type
;
2063 if (type
== OP_CLOSE_SUBEXP
|| type
== OP_OPEN_SUBEXP
)
2065 if (subexp_idx
!= dfa
->nodes
[node
].opr
.idx
)
/* It is against this limitation.
   Remove it from the current sifted state. */
2069 err
= sub_epsilon_src_nodes (dfa
, node
, dest_nodes
,
2071 if (__glibc_unlikely (err
!= REG_NOERROR
))
2080 static reg_errcode_t
2081 __attribute_warn_unused_result__
2082 sift_states_bkref (const re_match_context_t
*mctx
, re_sift_context_t
*sctx
,
2083 Idx str_idx
, const re_node_set
*candidates
)
2085 const re_dfa_t
*const dfa
= mctx
->dfa
;
2088 re_sift_context_t local_sctx
;
2089 Idx first_idx
= search_cur_bkref_entry (mctx
, str_idx
);
2091 if (first_idx
== -1)
2094 local_sctx
.sifted_states
= NULL
; /* Mark that it hasn't been initialized. */
2096 for (node_idx
= 0; node_idx
< candidates
->nelem
; ++node_idx
)
2099 re_token_type_t type
;
2100 struct re_backref_cache_entry
*entry
;
2101 node
= candidates
->elems
[node_idx
];
2102 type
= dfa
->nodes
[node
].type
;
2103 /* Avoid infinite loop for the REs like "()\1+". */
2104 if (node
== sctx
->last_node
&& str_idx
== sctx
->last_str_idx
)
2106 if (type
!= OP_BACK_REF
)
2109 entry
= mctx
->bkref_ents
+ first_idx
;
2110 enabled_idx
= first_idx
;
2117 re_dfastate_t
*cur_state
;
2119 if (entry
->node
!= node
)
2121 subexp_len
= entry
->subexp_to
- entry
->subexp_from
;
2122 to_idx
= str_idx
+ subexp_len
;
2123 dst_node
= (subexp_len
? dfa
->nexts
[node
]
2124 : dfa
->edests
[node
].elems
[0]);
2126 if (to_idx
> sctx
->last_str_idx
2127 || sctx
->sifted_states
[to_idx
] == NULL
2128 || !STATE_NODE_CONTAINS (sctx
->sifted_states
[to_idx
], dst_node
)
2129 || check_dst_limits (mctx
, &sctx
->limits
, node
,
2130 str_idx
, dst_node
, to_idx
))
2133 if (local_sctx
.sifted_states
== NULL
)
2136 err
= re_node_set_init_copy (&local_sctx
.limits
, &sctx
->limits
);
2137 if (__glibc_unlikely (err
!= REG_NOERROR
))
2140 local_sctx
.last_node
= node
;
2141 local_sctx
.last_str_idx
= str_idx
;
2142 ok
= re_node_set_insert (&local_sctx
.limits
, enabled_idx
);
2143 if (__glibc_unlikely (! ok
))
2148 cur_state
= local_sctx
.sifted_states
[str_idx
];
2149 err
= sift_states_backward (mctx
, &local_sctx
);
2150 if (__glibc_unlikely (err
!= REG_NOERROR
))
2152 if (sctx
->limited_states
!= NULL
)
2154 err
= merge_state_array (dfa
, sctx
->limited_states
,
2155 local_sctx
.sifted_states
,
2157 if (__glibc_unlikely (err
!= REG_NOERROR
))
2160 local_sctx
.sifted_states
[str_idx
] = cur_state
;
2161 re_node_set_remove (&local_sctx
.limits
, enabled_idx
);
2163 /* mctx->bkref_ents may have changed, reload the pointer. */
2164 entry
= mctx
->bkref_ents
+ enabled_idx
;
2166 while (enabled_idx
++, entry
++->more
);
2170 if (local_sctx
.sifted_states
!= NULL
)
2172 re_node_set_free (&local_sctx
.limits
);
2179 #ifdef RE_ENABLE_I18N
2181 sift_states_iter_mb (const re_match_context_t
*mctx
, re_sift_context_t
*sctx
,
2182 Idx node_idx
, Idx str_idx
, Idx max_str_idx
)
2184 const re_dfa_t
*const dfa
= mctx
->dfa
;
/* Check whether the node can accept "multi byte". */
2187 naccepted
= check_node_accept_bytes (dfa
, node_idx
, &mctx
->input
, str_idx
);
2188 if (naccepted
> 0 && str_idx
+ naccepted
<= max_str_idx
2189 && !STATE_NODE_CONTAINS (sctx
->sifted_states
[str_idx
+ naccepted
],
2190 dfa
->nexts
[node_idx
]))
/* The node can't accept the "multi byte", or the
   destination was already thrown away, so the node
   couldn't accept the current input "multi byte". */
/* Otherwise, it is certain that the node can accept
   'naccepted' bytes of input. */
2199 #endif /* RE_ENABLE_I18N */
2202 /* Functions for state transition. */
2204 /* Return the next state to which the current state STATE will transit by
2205 accepting the current input byte, and update STATE_LOG if necessary.
2206 Return NULL on failure.
2207 If STATE can accept a multibyte char/collating element/back reference
2208 update the destination of STATE_LOG. */
2210 static re_dfastate_t
*
2211 __attribute_warn_unused_result__
2212 transit_state (reg_errcode_t
*err
, re_match_context_t
*mctx
,
2213 re_dfastate_t
*state
)
2215 re_dfastate_t
**trtable
;
2218 #ifdef RE_ENABLE_I18N
/* If the current state can accept a multibyte character. */
2220 if (__glibc_unlikely (state
->accept_mb
))
2222 *err
= transit_state_mb (mctx
, state
);
2223 if (__glibc_unlikely (*err
!= REG_NOERROR
))
2226 #endif /* RE_ENABLE_I18N */
2228 /* Then decide the next state with the single byte. */
2231 /* don't use transition table */
2232 return transit_state_sb (err
, mctx
, state
);
2235 /* Use transition table */
2236 ch
= re_string_fetch_byte (&mctx
->input
);
2239 trtable
= state
->trtable
;
2240 if (__glibc_likely (trtable
!= NULL
))
2243 trtable
= state
->word_trtable
;
2244 if (__glibc_likely (trtable
!= NULL
))
2246 unsigned int context
;
2248 = re_string_context_at (&mctx
->input
,
2249 re_string_cur_idx (&mctx
->input
) - 1,
2251 if (IS_WORD_CONTEXT (context
))
2252 return trtable
[ch
+ SBC_MAX
];
2257 if (!build_trtable (mctx
->dfa
, state
))
2263 /* Retry, we now have a transition table. */
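/* The dispatch above indexes a 256-entry table of destination states by the
   current input byte, and a second 256-entry block when the word/non-word
   context of the preceding character matters.  A minimal standalone sketch
   of that kind of byte-indexed lookup follows; it is only an illustration,
   and the toy_* names are hypothetical, not this library's internal API.  */
#if 0
#include <stddef.h>

#define TOY_SBC_MAX 256

struct toy_state { int id; };

/* Look up the next state for byte CH.  TRTABLE has TOY_SBC_MAX entries, or
   2 * TOY_SBC_MAX entries when word context is tracked; in the latter case
   the second block is used when the preceding character was a word
   character.  */
static struct toy_state *
toy_transit (struct toy_state **trtable, unsigned char ch,
             int has_word_block, int in_word_context)
{
  if (has_word_block && in_word_context)
    return trtable[ch + TOY_SBC_MAX];
  return trtable[ch];
}
#endif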
/* Update the state_log if needed. */
2268 static re_dfastate_t
*
2269 merge_state_with_log (reg_errcode_t
*err
, re_match_context_t
*mctx
,
2270 re_dfastate_t
*next_state
)
2272 const re_dfa_t
*const dfa
= mctx
->dfa
;
2273 Idx cur_idx
= re_string_cur_idx (&mctx
->input
);
2275 if (cur_idx
> mctx
->state_log_top
)
2277 mctx
->state_log
[cur_idx
] = next_state
;
2278 mctx
->state_log_top
= cur_idx
;
2280 else if (mctx
->state_log
[cur_idx
] == 0)
2282 mctx
->state_log
[cur_idx
] = next_state
;
2286 re_dfastate_t
*pstate
;
2287 unsigned int context
;
2288 re_node_set next_nodes
, *log_nodes
, *table_nodes
= NULL
;
2289 /* If (state_log[cur_idx] != 0), it implies that cur_idx is
2290 the destination of a multibyte char/collating element/
2291 back reference. Then the next state is the union set of
2292 these destinations and the results of the transition table. */
2293 pstate
= mctx
->state_log
[cur_idx
];
2294 log_nodes
= pstate
->entrance_nodes
;
2295 if (next_state
!= NULL
)
2297 table_nodes
= next_state
->entrance_nodes
;
2298 *err
= re_node_set_init_union (&next_nodes
, table_nodes
,
2300 if (__glibc_unlikely (*err
!= REG_NOERROR
))
2304 next_nodes
= *log_nodes
;
/* Note: We have already added the nodes of the initial state,
   so we don't need to add them here. */
2308 context
= re_string_context_at (&mctx
->input
,
2309 re_string_cur_idx (&mctx
->input
) - 1,
2311 next_state
= mctx
->state_log
[cur_idx
]
2312 = re_acquire_state_context (err
, dfa
, &next_nodes
, context
);
2313 /* We don't need to check errors here, since the return value of
2314 this function is next_state and ERR is already set. */
2316 if (table_nodes
!= NULL
)
2317 re_node_set_free (&next_nodes
);
2320 if (__glibc_unlikely (dfa
->nbackref
) && next_state
!= NULL
)
/* Check OP_OPEN_SUBEXP nodes in the current state in case we use them
   later.  We must check them here, since the back references in the
   next state might use them. */
2325 *err
= check_subexp_matching_top (mctx
, &next_state
->nodes
,
2327 if (__glibc_unlikely (*err
!= REG_NOERROR
))
2330 /* If the next state has back references. */
2331 if (next_state
->has_backref
)
2333 *err
= transit_state_bkref (mctx
, &next_state
->nodes
);
2334 if (__glibc_unlikely (*err
!= REG_NOERROR
))
2336 next_state
= mctx
->state_log
[cur_idx
];
2343 /* Skip bytes in the input that correspond to part of a
2344 multi-byte match, then look in the log for a state
2345 from which to restart matching. */
2346 static re_dfastate_t
*
2347 find_recover_state (reg_errcode_t
*err
, re_match_context_t
*mctx
)
2349 re_dfastate_t
*cur_state
;
2352 Idx max
= mctx
->state_log_top
;
2353 Idx cur_str_idx
= re_string_cur_idx (&mctx
->input
);
2357 if (++cur_str_idx
> max
)
2359 re_string_skip_bytes (&mctx
->input
, 1);
2361 while (mctx
->state_log
[cur_str_idx
] == NULL
);
2363 cur_state
= merge_state_with_log (err
, mctx
, NULL
);
2365 while (*err
== REG_NOERROR
&& cur_state
== NULL
);
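/* The loop above advances the input index until it reaches a position whose
   state_log slot is non-NULL, and restarts matching from the state rebuilt
   there.  A standalone sketch of that "find the next logged position" scan
   follows; the toy_* names are hypothetical, not this library's code.  */
#if 0
#include <stddef.h>

/* Return the first index in [START, TOP] whose slot in LOG is non-NULL,
   or -1 if every remaining slot is NULL.  */
static ptrdiff_t
toy_next_logged_index (void **log, ptrdiff_t start, ptrdiff_t top)
{
  ptrdiff_t i;
  for (i = start; i <= top; i++)
    if (log[i] != NULL)
      return i;
  return -1;
}
#endif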
2369 /* Helper functions for transit_state. */
/* From the node set CUR_NODES, pick up the nodes whose types are
   OP_OPEN_SUBEXP and which have corresponding back references in the
   regular expression, and register them for later use when evaluating
   those back references. */
2376 static reg_errcode_t
2377 check_subexp_matching_top (re_match_context_t
*mctx
, re_node_set
*cur_nodes
,
2380 const re_dfa_t
*const dfa
= mctx
->dfa
;
/* TODO: This isn't efficient.
   Because there might be more than one node whose type is
   OP_OPEN_SUBEXP and whose index is SUBEXP_IDX, we must check all
   such nodes. */
2389 for (node_idx
= 0; node_idx
< cur_nodes
->nelem
; ++node_idx
)
2391 Idx node
= cur_nodes
->elems
[node_idx
];
2392 if (dfa
->nodes
[node
].type
== OP_OPEN_SUBEXP
2393 && dfa
->nodes
[node
].opr
.idx
< BITSET_WORD_BITS
2394 && (dfa
->used_bkref_map
2395 & ((bitset_word_t
) 1 << dfa
->nodes
[node
].opr
.idx
)))
2397 err
= match_ctx_add_subtop (mctx
, node
, str_idx
);
2398 if (__glibc_unlikely (err
!= REG_NOERROR
))
2406 /* Return the next state to which the current state STATE will transit by
2407 accepting the current input byte. Return NULL on failure. */
2409 static re_dfastate_t
*
2410 transit_state_sb (reg_errcode_t
*err
, re_match_context_t
*mctx
,
2411 re_dfastate_t
*state
)
2413 const re_dfa_t
*const dfa
= mctx
->dfa
;
2414 re_node_set next_nodes
;
2415 re_dfastate_t
*next_state
;
2416 Idx node_cnt
, cur_str_idx
= re_string_cur_idx (&mctx
->input
);
2417 unsigned int context
;
2419 *err
= re_node_set_alloc (&next_nodes
, state
->nodes
.nelem
+ 1);
2420 if (__glibc_unlikely (*err
!= REG_NOERROR
))
2422 for (node_cnt
= 0; node_cnt
< state
->nodes
.nelem
; ++node_cnt
)
2424 Idx cur_node
= state
->nodes
.elems
[node_cnt
];
2425 if (check_node_accept (mctx
, dfa
->nodes
+ cur_node
, cur_str_idx
))
2427 *err
= re_node_set_merge (&next_nodes
,
2428 dfa
->eclosures
+ dfa
->nexts
[cur_node
]);
2429 if (__glibc_unlikely (*err
!= REG_NOERROR
))
2431 re_node_set_free (&next_nodes
);
2436 context
= re_string_context_at (&mctx
->input
, cur_str_idx
, mctx
->eflags
);
2437 next_state
= re_acquire_state_context (err
, dfa
, &next_nodes
, context
);
2438 /* We don't need to check errors here, since the return value of
2439 this function is next_state and ERR is already set. */
2441 re_node_set_free (&next_nodes
);
2442 re_string_skip_bytes (&mctx
->input
, 1);
2447 #ifdef RE_ENABLE_I18N
2448 static reg_errcode_t
2449 transit_state_mb (re_match_context_t
*mctx
, re_dfastate_t
*pstate
)
2451 const re_dfa_t
*const dfa
= mctx
->dfa
;
2455 for (i
= 0; i
< pstate
->nodes
.nelem
; ++i
)
2457 re_node_set dest_nodes
, *new_nodes
;
2458 Idx cur_node_idx
= pstate
->nodes
.elems
[i
];
2461 unsigned int context
;
2462 re_dfastate_t
*dest_state
;
2464 if (!dfa
->nodes
[cur_node_idx
].accept_mb
)
2467 if (dfa
->nodes
[cur_node_idx
].constraint
)
2469 context
= re_string_context_at (&mctx
->input
,
2470 re_string_cur_idx (&mctx
->input
),
2472 if (NOT_SATISFY_NEXT_CONSTRAINT (dfa
->nodes
[cur_node_idx
].constraint
,
/* How many bytes can the node accept? */
2478 naccepted
= check_node_accept_bytes (dfa
, cur_node_idx
, &mctx
->input
,
2479 re_string_cur_idx (&mctx
->input
));
/* The node can accept 'naccepted' bytes. */
2484 dest_idx
= re_string_cur_idx (&mctx
->input
) + naccepted
;
2485 mctx
->max_mb_elem_len
= ((mctx
->max_mb_elem_len
< naccepted
) ? naccepted
2486 : mctx
->max_mb_elem_len
);
2487 err
= clean_state_log_if_needed (mctx
, dest_idx
);
2488 if (__glibc_unlikely (err
!= REG_NOERROR
))
2490 DEBUG_ASSERT (dfa
->nexts
[cur_node_idx
] != -1);
2491 new_nodes
= dfa
->eclosures
+ dfa
->nexts
[cur_node_idx
];
2493 dest_state
= mctx
->state_log
[dest_idx
];
2494 if (dest_state
== NULL
)
2495 dest_nodes
= *new_nodes
;
2498 err
= re_node_set_init_union (&dest_nodes
,
2499 dest_state
->entrance_nodes
, new_nodes
);
2500 if (__glibc_unlikely (err
!= REG_NOERROR
))
2503 context
= re_string_context_at (&mctx
->input
, dest_idx
- 1,
2505 mctx
->state_log
[dest_idx
]
2506 = re_acquire_state_context (&err
, dfa
, &dest_nodes
, context
);
2507 if (dest_state
!= NULL
)
2508 re_node_set_free (&dest_nodes
);
2509 if (__glibc_unlikely (mctx
->state_log
[dest_idx
] == NULL
2510 && err
!= REG_NOERROR
))
2515 #endif /* RE_ENABLE_I18N */
2517 static reg_errcode_t
2518 transit_state_bkref (re_match_context_t
*mctx
, const re_node_set
*nodes
)
2520 const re_dfa_t
*const dfa
= mctx
->dfa
;
2523 Idx cur_str_idx
= re_string_cur_idx (&mctx
->input
);
2525 for (i
= 0; i
< nodes
->nelem
; ++i
)
2527 Idx dest_str_idx
, prev_nelem
, bkc_idx
;
2528 Idx node_idx
= nodes
->elems
[i
];
2529 unsigned int context
;
2530 const re_token_t
*node
= dfa
->nodes
+ node_idx
;
2531 re_node_set
*new_dest_nodes
;
2533 /* Check whether 'node' is a backreference or not. */
2534 if (node
->type
!= OP_BACK_REF
)
2537 if (node
->constraint
)
2539 context
= re_string_context_at (&mctx
->input
, cur_str_idx
,
2541 if (NOT_SATISFY_NEXT_CONSTRAINT (node
->constraint
, context
))
/* 'node' is a backreference.
   Check the substring which the subexpression matched. */
2547 bkc_idx
= mctx
->nbkref_ents
;
2548 err
= get_subexp (mctx
, node_idx
, cur_str_idx
);
2549 if (__glibc_unlikely (err
!= REG_NOERROR
))
/* And add the epsilon closures (which are 'new_dest_nodes') of
   the backreference to the appropriate state_log. */
2554 DEBUG_ASSERT (dfa
->nexts
[node_idx
] != -1);
2555 for (; bkc_idx
< mctx
->nbkref_ents
; ++bkc_idx
)
2558 re_dfastate_t
*dest_state
;
2559 struct re_backref_cache_entry
*bkref_ent
;
2560 bkref_ent
= mctx
->bkref_ents
+ bkc_idx
;
2561 if (bkref_ent
->node
!= node_idx
|| bkref_ent
->str_idx
!= cur_str_idx
)
2563 subexp_len
= bkref_ent
->subexp_to
- bkref_ent
->subexp_from
;
2564 new_dest_nodes
= (subexp_len
== 0
2565 ? dfa
->eclosures
+ dfa
->edests
[node_idx
].elems
[0]
2566 : dfa
->eclosures
+ dfa
->nexts
[node_idx
]);
2567 dest_str_idx
= (cur_str_idx
+ bkref_ent
->subexp_to
2568 - bkref_ent
->subexp_from
);
2569 context
= re_string_context_at (&mctx
->input
, dest_str_idx
- 1,
2571 dest_state
= mctx
->state_log
[dest_str_idx
];
2572 prev_nelem
= ((mctx
->state_log
[cur_str_idx
] == NULL
) ? 0
2573 : mctx
->state_log
[cur_str_idx
]->nodes
.nelem
);
/* Add 'new_dest_nodes' to the state_log. */
2575 if (dest_state
== NULL
)
2577 mctx
->state_log
[dest_str_idx
]
2578 = re_acquire_state_context (&err
, dfa
, new_dest_nodes
,
2580 if (__glibc_unlikely (mctx
->state_log
[dest_str_idx
] == NULL
2581 && err
!= REG_NOERROR
))
2586 re_node_set dest_nodes
;
2587 err
= re_node_set_init_union (&dest_nodes
,
2588 dest_state
->entrance_nodes
,
2590 if (__glibc_unlikely (err
!= REG_NOERROR
))
2592 re_node_set_free (&dest_nodes
);
2595 mctx
->state_log
[dest_str_idx
]
2596 = re_acquire_state_context (&err
, dfa
, &dest_nodes
, context
);
2597 re_node_set_free (&dest_nodes
);
2598 if (__glibc_unlikely (mctx
->state_log
[dest_str_idx
] == NULL
2599 && err
!= REG_NOERROR
))
/* We need to check recursively if the backreference can epsilon
   transit. */
2605 && mctx
->state_log
[cur_str_idx
]->nodes
.nelem
> prev_nelem
)
2607 err
= check_subexp_matching_top (mctx
, new_dest_nodes
,
2609 if (__glibc_unlikely (err
!= REG_NOERROR
))
2611 err
= transit_state_bkref (mctx
, new_dest_nodes
);
2612 if (__glibc_unlikely (err
!= REG_NOERROR
))
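/* transit_state_bkref () and the helpers below implement what a caller
   observes as backreference matching through the public POSIX interface.
   A small self-contained usage example follows (basic regular expression
   syntax, minimal error handling); it exercises only the public regcomp /
   regexec / regfree API.  */
#if 0
#include <regex.h>
#include <stdio.h>

int
main (void)
{
  regex_t re;
  regmatch_t m[2];
  /* \(ab*\)x\1 matches a group, then 'x', then the same text again.  */
  if (regcomp (&re, "\\(ab*\\)x\\1", 0) != 0)
    return 1;
  if (regexec (&re, "abbxabb", 2, m, 0) == 0)
    printf ("group 1: [%d, %d)\n", (int) m[1].rm_so, (int) m[1].rm_eo);
  regfree (&re);
  return 0;
}
#endif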
/* Enumerate all the candidates which the backreference BKREF_NODE can match
   at BKREF_STR_IDX, and register them by match_ctx_add_entry().
   Note that we might collect inappropriate candidates here.
   However, the cost of checking them strictly here is too high, so we
   delay that checking until prune_impossible_nodes(). */
2628 static reg_errcode_t
2629 __attribute_warn_unused_result__
2630 get_subexp (re_match_context_t
*mctx
, Idx bkref_node
, Idx bkref_str_idx
)
2632 const re_dfa_t
*const dfa
= mctx
->dfa
;
2633 Idx subexp_num
, sub_top_idx
;
2634 const char *buf
= (const char *) re_string_get_buffer (&mctx
->input
);
2635 /* Return if we have already checked BKREF_NODE at BKREF_STR_IDX. */
2636 Idx cache_idx
= search_cur_bkref_entry (mctx
, bkref_str_idx
);
2637 if (cache_idx
!= -1)
2639 const struct re_backref_cache_entry
*entry
2640 = mctx
->bkref_ents
+ cache_idx
;
2642 if (entry
->node
== bkref_node
)
2643 return REG_NOERROR
; /* We already checked it. */
2644 while (entry
++->more
);
2647 subexp_num
= dfa
->nodes
[bkref_node
].opr
.idx
;
2649 /* For each sub expression */
2650 for (sub_top_idx
= 0; sub_top_idx
< mctx
->nsub_tops
; ++sub_top_idx
)
2653 re_sub_match_top_t
*sub_top
= mctx
->sub_tops
[sub_top_idx
];
2654 re_sub_match_last_t
*sub_last
;
2655 Idx sub_last_idx
, sl_str
, bkref_str_off
;
2657 if (dfa
->nodes
[sub_top
->node
].opr
.idx
!= subexp_num
)
2658 continue; /* It isn't related. */
2660 sl_str
= sub_top
->str_idx
;
2661 bkref_str_off
= bkref_str_idx
;
/* At first, check the last node of sub expressions we already
   evaluated. */
2664 for (sub_last_idx
= 0; sub_last_idx
< sub_top
->nlasts
; ++sub_last_idx
)
2666 regoff_t sl_str_diff
;
2667 sub_last
= sub_top
->lasts
[sub_last_idx
];
2668 sl_str_diff
= sub_last
->str_idx
- sl_str
;
/* Does the string matched by the sub expression match the substring
   at the back reference? */
2671 if (sl_str_diff
> 0)
2673 if (__glibc_unlikely (bkref_str_off
+ sl_str_diff
2674 > mctx
->input
.valid_len
))
2676 /* Not enough chars for a successful match. */
2677 if (bkref_str_off
+ sl_str_diff
> mctx
->input
.len
)
2680 err
= clean_state_log_if_needed (mctx
,
2683 if (__glibc_unlikely (err
!= REG_NOERROR
))
2685 buf
= (const char *) re_string_get_buffer (&mctx
->input
);
2687 if (memcmp (buf
+ bkref_str_off
, buf
+ sl_str
, sl_str_diff
) != 0)
2688 /* We don't need to search this sub expression any more. */
2691 bkref_str_off
+= sl_str_diff
;
2692 sl_str
+= sl_str_diff
;
2693 err
= get_subexp_sub (mctx
, sub_top
, sub_last
, bkref_node
,
/* Reload buf, since the preceding call might have reallocated
   the buffer. */
2698 buf
= (const char *) re_string_get_buffer (&mctx
->input
);
2700 if (err
== REG_NOMATCH
)
2702 if (__glibc_unlikely (err
!= REG_NOERROR
))
2706 if (sub_last_idx
< sub_top
->nlasts
)
2708 if (sub_last_idx
> 0)
2710 /* Then, search for the other last nodes of the sub expression. */
2711 for (; sl_str
<= bkref_str_idx
; ++sl_str
)
2714 regoff_t sl_str_off
;
2715 const re_node_set
*nodes
;
2716 sl_str_off
= sl_str
- sub_top
->str_idx
;
/* Does the string matched by the sub expression match the substring
   at the back reference? */
2721 if (__glibc_unlikely (bkref_str_off
>= mctx
->input
.valid_len
))
2723 /* If we are at the end of the input, we cannot match. */
2724 if (bkref_str_off
>= mctx
->input
.len
)
2727 err
= extend_buffers (mctx
, bkref_str_off
+ 1);
2728 if (__glibc_unlikely (err
!= REG_NOERROR
))
2731 buf
= (const char *) re_string_get_buffer (&mctx
->input
);
2733 if (buf
[bkref_str_off
++] != buf
[sl_str
- 1])
break; /* We don't need to search this sub expression any more. */
2737 if (mctx
->state_log
[sl_str
] == NULL
)
2739 /* Does this state have a ')' of the sub expression? */
2740 nodes
= &mctx
->state_log
[sl_str
]->nodes
;
2741 cls_node
= find_subexp_node (dfa
, nodes
, subexp_num
,
2745 if (sub_top
->path
== NULL
)
2747 sub_top
->path
= calloc (sizeof (state_array_t
),
2748 sl_str
- sub_top
->str_idx
+ 1);
2749 if (sub_top
->path
== NULL
)
/* Can the OP_OPEN_SUBEXP node arrive at the OP_CLOSE_SUBEXP node
   in the current context? */
2754 err
= check_arrival (mctx
, sub_top
->path
, sub_top
->node
,
2755 sub_top
->str_idx
, cls_node
, sl_str
,
2757 if (err
== REG_NOMATCH
)
2759 if (__glibc_unlikely (err
!= REG_NOERROR
))
2761 sub_last
= match_ctx_add_sublast (sub_top
, cls_node
, sl_str
);
2762 if (__glibc_unlikely (sub_last
== NULL
))
2764 err
= get_subexp_sub (mctx
, sub_top
, sub_last
, bkref_node
,
2766 buf
= (const char *) re_string_get_buffer (&mctx
->input
);
2767 if (err
== REG_NOMATCH
)
2769 if (__glibc_unlikely (err
!= REG_NOERROR
))
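/* Before doing anything more expensive, get_subexp () above rejects a
   candidate by comparing the text the subexpression matched against the
   text at the back reference position with memcmp.  A standalone sketch of
   that check follows; the toy_* names are hypothetical, not this library's
   internals.  */
#if 0
#include <stdbool.h>
#include <stddef.h>
#include <string.h>

/* Return true if the LEN bytes starting at SUBEXP_FROM reappear verbatim
   at BKREF_FROM within BUF (both ranges assumed to be in bounds).  */
static bool
toy_bkref_candidate_ok (const char *buf, size_t subexp_from,
                        size_t bkref_from, size_t len)
{
  return memcmp (buf + bkref_from, buf + subexp_from, len) == 0;
}
#endif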
2776 /* Helper functions for get_subexp(). */
/* Check whether SUB_LAST can arrive at the back reference BKREF_NODE at
   BKREF_STR.  If it can, register the sub expression expressed with SUB_TOP
   and SUB_LAST. */
2782 static reg_errcode_t
2783 get_subexp_sub (re_match_context_t
*mctx
, const re_sub_match_top_t
*sub_top
,
2784 re_sub_match_last_t
*sub_last
, Idx bkref_node
, Idx bkref_str
)
/* Can the subexpression arrive at the back reference? */
2789 err
= check_arrival (mctx
, &sub_last
->path
, sub_last
->node
,
2790 sub_last
->str_idx
, bkref_node
, bkref_str
,
2792 if (err
!= REG_NOERROR
)
2794 err
= match_ctx_add_entry (mctx
, bkref_node
, bkref_str
, sub_top
->str_idx
,
2796 if (__glibc_unlikely (err
!= REG_NOERROR
))
2798 to_idx
= bkref_str
+ sub_last
->str_idx
- sub_top
->str_idx
;
2799 return clean_state_log_if_needed (mctx
, to_idx
);
2802 /* Find the first node which is '(' or ')' and whose index is SUBEXP_IDX.
2803 Search '(' if FL_OPEN, or search ')' otherwise.
2804 TODO: This function isn't efficient...
Because there might be more than one node whose type is
   OP_OPEN_SUBEXP and whose index is SUBEXP_IDX, we must check all
   such nodes. */
2811 find_subexp_node (const re_dfa_t
*dfa
, const re_node_set
*nodes
,
2812 Idx subexp_idx
, int type
)
2815 for (cls_idx
= 0; cls_idx
< nodes
->nelem
; ++cls_idx
)
2817 Idx cls_node
= nodes
->elems
[cls_idx
];
2818 const re_token_t
*node
= dfa
->nodes
+ cls_node
;
2819 if (node
->type
== type
2820 && node
->opr
.idx
== subexp_idx
)
/* Check whether the node TOP_NODE at TOP_STR can arrive at the node
   LAST_NODE at LAST_STR.  We record the path onto PATH since it will be
   reused later.
2829 Return REG_NOERROR if it can arrive, REG_NOMATCH if it cannot,
2830 REG_ESPACE if memory is exhausted. */
2832 static reg_errcode_t
2833 __attribute_warn_unused_result__
2834 check_arrival (re_match_context_t
*mctx
, state_array_t
*path
, Idx top_node
,
2835 Idx top_str
, Idx last_node
, Idx last_str
, int type
)
2837 const re_dfa_t
*const dfa
= mctx
->dfa
;
2838 reg_errcode_t err
= REG_NOERROR
;
2839 Idx subexp_num
, backup_cur_idx
, str_idx
, null_cnt
;
2840 re_dfastate_t
*cur_state
= NULL
;
2841 re_node_set
*cur_nodes
, next_nodes
;
2842 re_dfastate_t
**backup_state_log
;
2843 unsigned int context
;
2845 subexp_num
= dfa
->nodes
[top_node
].opr
.idx
;
/* Extend the buffer if needed. */
2847 if (__glibc_unlikely (path
->alloc
< last_str
+ mctx
->max_mb_elem_len
+ 1))
2849 re_dfastate_t
**new_array
;
2850 Idx old_alloc
= path
->alloc
;
2851 Idx incr_alloc
= last_str
+ mctx
->max_mb_elem_len
+ 1;
2853 if (__glibc_unlikely (IDX_MAX
- old_alloc
< incr_alloc
))
2855 new_alloc
= old_alloc
+ incr_alloc
;
2856 if (__glibc_unlikely (SIZE_MAX
/ sizeof (re_dfastate_t
*) < new_alloc
))
2858 new_array
= re_realloc (path
->array
, re_dfastate_t
*, new_alloc
);
2859 if (__glibc_unlikely (new_array
== NULL
))
2861 path
->array
= new_array
;
2862 path
->alloc
= new_alloc
;
2863 memset (new_array
+ old_alloc
, '\0',
2864 sizeof (re_dfastate_t
*) * (path
->alloc
- old_alloc
));
2867 str_idx
= path
->next_idx
? path
->next_idx
: top_str
;
/* Temporarily modify MCTX. */
2870 backup_state_log
= mctx
->state_log
;
2871 backup_cur_idx
= mctx
->input
.cur_idx
;
2872 mctx
->state_log
= path
->array
;
2873 mctx
->input
.cur_idx
= str_idx
;
/* Set up the initial node set. */
2876 context
= re_string_context_at (&mctx
->input
, str_idx
- 1, mctx
->eflags
);
2877 if (str_idx
== top_str
)
2879 err
= re_node_set_init_1 (&next_nodes
, top_node
);
2880 if (__glibc_unlikely (err
!= REG_NOERROR
))
2882 err
= check_arrival_expand_ecl (dfa
, &next_nodes
, subexp_num
, type
);
2883 if (__glibc_unlikely (err
!= REG_NOERROR
))
2885 re_node_set_free (&next_nodes
);
2891 cur_state
= mctx
->state_log
[str_idx
];
2892 if (cur_state
&& cur_state
->has_backref
)
2894 err
= re_node_set_init_copy (&next_nodes
, &cur_state
->nodes
);
2895 if (__glibc_unlikely (err
!= REG_NOERROR
))
2899 re_node_set_init_empty (&next_nodes
);
2901 if (str_idx
== top_str
|| (cur_state
&& cur_state
->has_backref
))
2903 if (next_nodes
.nelem
)
2905 err
= expand_bkref_cache (mctx
, &next_nodes
, str_idx
,
2907 if (__glibc_unlikely (err
!= REG_NOERROR
))
2909 re_node_set_free (&next_nodes
);
2913 cur_state
= re_acquire_state_context (&err
, dfa
, &next_nodes
, context
);
2914 if (__glibc_unlikely (cur_state
== NULL
&& err
!= REG_NOERROR
))
2916 re_node_set_free (&next_nodes
);
2919 mctx
->state_log
[str_idx
] = cur_state
;
2922 for (null_cnt
= 0; str_idx
< last_str
&& null_cnt
<= mctx
->max_mb_elem_len
;)
2924 re_node_set_empty (&next_nodes
);
2925 if (mctx
->state_log
[str_idx
+ 1])
2927 err
= re_node_set_merge (&next_nodes
,
2928 &mctx
->state_log
[str_idx
+ 1]->nodes
);
2929 if (__glibc_unlikely (err
!= REG_NOERROR
))
2931 re_node_set_free (&next_nodes
);
2937 err
= check_arrival_add_next_nodes (mctx
, str_idx
,
2938 &cur_state
->non_eps_nodes
,
2940 if (__glibc_unlikely (err
!= REG_NOERROR
))
2942 re_node_set_free (&next_nodes
);
2947 if (next_nodes
.nelem
)
2949 err
= check_arrival_expand_ecl (dfa
, &next_nodes
, subexp_num
, type
);
2950 if (__glibc_unlikely (err
!= REG_NOERROR
))
2952 re_node_set_free (&next_nodes
);
2955 err
= expand_bkref_cache (mctx
, &next_nodes
, str_idx
,
2957 if (__glibc_unlikely (err
!= REG_NOERROR
))
2959 re_node_set_free (&next_nodes
);
2963 context
= re_string_context_at (&mctx
->input
, str_idx
- 1, mctx
->eflags
);
2964 cur_state
= re_acquire_state_context (&err
, dfa
, &next_nodes
, context
);
2965 if (__glibc_unlikely (cur_state
== NULL
&& err
!= REG_NOERROR
))
2967 re_node_set_free (&next_nodes
);
2970 mctx
->state_log
[str_idx
] = cur_state
;
2971 null_cnt
= cur_state
== NULL
? null_cnt
+ 1 : 0;
2973 re_node_set_free (&next_nodes
);
2974 cur_nodes
= (mctx
->state_log
[last_str
] == NULL
? NULL
2975 : &mctx
->state_log
[last_str
]->nodes
);
2976 path
->next_idx
= str_idx
;
2979 mctx
->state_log
= backup_state_log
;
2980 mctx
->input
.cur_idx
= backup_cur_idx
;
/* Then check whether the current node set has the node LAST_NODE. */
2983 if (cur_nodes
!= NULL
&& re_node_set_contains (cur_nodes
, last_node
))
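/* check_arrival () grows PATH->ARRAY and then zeroes only the newly added
   tail, so the states recorded so far stay valid.  A standalone sketch of
   that grow-and-zero pattern, using plain realloc and hypothetical toy_*
   names, follows.  */
#if 0
#include <stdbool.h>
#include <stddef.h>
#include <stdlib.h>
#include <string.h>

/* Grow *ARRAY from OLD_ALLOC to NEW_ALLOC pointers, zeroing the new tail.
   Return false on allocation failure, leaving *ARRAY untouched.  */
static bool
toy_grow_ptr_array (void ***array, size_t old_alloc, size_t new_alloc)
{
  void **tmp = realloc (*array, new_alloc * sizeof (void *));
  if (tmp == NULL)
    return false;
  memset (tmp + old_alloc, 0, (new_alloc - old_alloc) * sizeof (void *));
  *array = tmp;
  return true;
}
#endif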
2989 /* Helper functions for check_arrival. */
/* Calculate the destination nodes of CUR_NODES at STR_IDX, and append them
   to NEXT_NODES.
   TODO: This function is similar to the functions transit_state*(),
   however this function does much additional work.
   Can't we unify them? */
2997 static reg_errcode_t
2998 __attribute_warn_unused_result__
2999 check_arrival_add_next_nodes (re_match_context_t
*mctx
, Idx str_idx
,
3000 re_node_set
*cur_nodes
, re_node_set
*next_nodes
)
3002 const re_dfa_t
*const dfa
= mctx
->dfa
;
3005 #ifdef RE_ENABLE_I18N
3006 reg_errcode_t err
= REG_NOERROR
;
3008 re_node_set union_set
;
3009 re_node_set_init_empty (&union_set
);
3010 for (cur_idx
= 0; cur_idx
< cur_nodes
->nelem
; ++cur_idx
)
3013 Idx cur_node
= cur_nodes
->elems
[cur_idx
];
3014 DEBUG_ASSERT (!IS_EPSILON_NODE (dfa
->nodes
[cur_node
].type
));
3016 #ifdef RE_ENABLE_I18N
3017 /* If the node may accept "multi byte". */
3018 if (dfa
->nodes
[cur_node
].accept_mb
)
3020 naccepted
= check_node_accept_bytes (dfa
, cur_node
, &mctx
->input
,
3024 re_dfastate_t
*dest_state
;
3025 Idx next_node
= dfa
->nexts
[cur_node
];
3026 Idx next_idx
= str_idx
+ naccepted
;
3027 dest_state
= mctx
->state_log
[next_idx
];
3028 re_node_set_empty (&union_set
);
3031 err
= re_node_set_merge (&union_set
, &dest_state
->nodes
);
3032 if (__glibc_unlikely (err
!= REG_NOERROR
))
3034 re_node_set_free (&union_set
);
3038 ok
= re_node_set_insert (&union_set
, next_node
);
3039 if (__glibc_unlikely (! ok
))
3041 re_node_set_free (&union_set
);
3044 mctx
->state_log
[next_idx
] = re_acquire_state (&err
, dfa
,
3046 if (__glibc_unlikely (mctx
->state_log
[next_idx
] == NULL
3047 && err
!= REG_NOERROR
))
3049 re_node_set_free (&union_set
);
3054 #endif /* RE_ENABLE_I18N */
3056 || check_node_accept (mctx
, dfa
->nodes
+ cur_node
, str_idx
))
3058 ok
= re_node_set_insert (next_nodes
, dfa
->nexts
[cur_node
]);
3059 if (__glibc_unlikely (! ok
))
3061 re_node_set_free (&union_set
);
3066 re_node_set_free (&union_set
);
/* For all the nodes in CUR_NODES, add their epsilon closures to
   CUR_NODES, but exclude the nodes which are:
   - inside the sub expression whose number is EX_SUBEXP, if FL_OPEN.
   - outside the sub expression whose number is EX_SUBEXP, if !FL_OPEN. */
3076 static reg_errcode_t
3077 check_arrival_expand_ecl (const re_dfa_t
*dfa
, re_node_set
*cur_nodes
,
3078 Idx ex_subexp
, int type
)
3081 Idx idx
, outside_node
;
3082 re_node_set new_nodes
;
3083 DEBUG_ASSERT (cur_nodes
->nelem
);
3084 err
= re_node_set_alloc (&new_nodes
, cur_nodes
->nelem
);
3085 if (__glibc_unlikely (err
!= REG_NOERROR
))
3087 /* Create a new node set NEW_NODES with the nodes which are epsilon
3088 closures of the node in CUR_NODES. */
3090 for (idx
= 0; idx
< cur_nodes
->nelem
; ++idx
)
3092 Idx cur_node
= cur_nodes
->elems
[idx
];
3093 const re_node_set
*eclosure
= dfa
->eclosures
+ cur_node
;
3094 outside_node
= find_subexp_node (dfa
, eclosure
, ex_subexp
, type
);
3095 if (outside_node
== -1)
3097 /* There are no problematic nodes, just merge them. */
3098 err
= re_node_set_merge (&new_nodes
, eclosure
);
3099 if (__glibc_unlikely (err
!= REG_NOERROR
))
3101 re_node_set_free (&new_nodes
);
3107 /* There are problematic nodes, re-calculate incrementally. */
3108 err
= check_arrival_expand_ecl_sub (dfa
, &new_nodes
, cur_node
,
3110 if (__glibc_unlikely (err
!= REG_NOERROR
))
3112 re_node_set_free (&new_nodes
);
3117 re_node_set_free (cur_nodes
);
3118 *cur_nodes
= new_nodes
;
3122 /* Helper function for check_arrival_expand_ecl.
3123 Check incrementally the epsilon closure of TARGET, and if it isn't
3124 problematic append it to DST_NODES. */
3126 static reg_errcode_t
3127 __attribute_warn_unused_result__
3128 check_arrival_expand_ecl_sub (const re_dfa_t
*dfa
, re_node_set
*dst_nodes
,
3129 Idx target
, Idx ex_subexp
, int type
)
3132 for (cur_node
= target
; !re_node_set_contains (dst_nodes
, cur_node
);)
3136 if (dfa
->nodes
[cur_node
].type
== type
3137 && dfa
->nodes
[cur_node
].opr
.idx
== ex_subexp
)
3139 if (type
== OP_CLOSE_SUBEXP
)
3141 ok
= re_node_set_insert (dst_nodes
, cur_node
);
3142 if (__glibc_unlikely (! ok
))
3147 ok
= re_node_set_insert (dst_nodes
, cur_node
);
3148 if (__glibc_unlikely (! ok
))
3150 if (dfa
->edests
[cur_node
].nelem
== 0)
3152 if (dfa
->edests
[cur_node
].nelem
== 2)
3155 err
= check_arrival_expand_ecl_sub (dfa
, dst_nodes
,
3156 dfa
->edests
[cur_node
].elems
[1],
3158 if (__glibc_unlikely (err
!= REG_NOERROR
))
3161 cur_node
= dfa
->edests
[cur_node
].elems
[0];
3167 /* For all the back references in the current state, calculate the
3168 destination of the back references by the appropriate entry
3169 in MCTX->BKREF_ENTS. */
3171 static reg_errcode_t
3172 __attribute_warn_unused_result__
3173 expand_bkref_cache (re_match_context_t
*mctx
, re_node_set
*cur_nodes
,
3174 Idx cur_str
, Idx subexp_num
, int type
)
3176 const re_dfa_t
*const dfa
= mctx
->dfa
;
3178 Idx cache_idx_start
= search_cur_bkref_entry (mctx
, cur_str
);
3179 struct re_backref_cache_entry
*ent
;
3181 if (cache_idx_start
== -1)
3185 ent
= mctx
->bkref_ents
+ cache_idx_start
;
3188 Idx to_idx
, next_node
;
/* Is this entry ENT appropriate? */
3191 if (!re_node_set_contains (cur_nodes
, ent
->node
))
3194 to_idx
= cur_str
+ ent
->subexp_to
- ent
->subexp_from
;
3195 /* Calculate the destination of the back reference, and append it
3196 to MCTX->STATE_LOG. */
3197 if (to_idx
== cur_str
)
/* The backreference did epsilon transit, so we must re-check all the
   nodes in the current state. */
3201 re_node_set new_dests
;
3202 reg_errcode_t err2
, err3
;
3203 next_node
= dfa
->edests
[ent
->node
].elems
[0];
3204 if (re_node_set_contains (cur_nodes
, next_node
))
3206 err
= re_node_set_init_1 (&new_dests
, next_node
);
3207 err2
= check_arrival_expand_ecl (dfa
, &new_dests
, subexp_num
, type
);
3208 err3
= re_node_set_merge (cur_nodes
, &new_dests
);
3209 re_node_set_free (&new_dests
);
3210 if (__glibc_unlikely (err
!= REG_NOERROR
|| err2
!= REG_NOERROR
3211 || err3
!= REG_NOERROR
))
3213 err
= (err
!= REG_NOERROR
? err
3214 : (err2
!= REG_NOERROR
? err2
: err3
));
3217 /* TODO: It is still inefficient... */
3222 re_node_set union_set
;
3223 next_node
= dfa
->nexts
[ent
->node
];
3224 if (mctx
->state_log
[to_idx
])
3227 if (re_node_set_contains (&mctx
->state_log
[to_idx
]->nodes
,
3230 err
= re_node_set_init_copy (&union_set
,
3231 &mctx
->state_log
[to_idx
]->nodes
);
3232 ok
= re_node_set_insert (&union_set
, next_node
);
3233 if (__glibc_unlikely (err
!= REG_NOERROR
|| ! ok
))
3235 re_node_set_free (&union_set
);
3236 err
= err
!= REG_NOERROR
? err
: REG_ESPACE
;
3242 err
= re_node_set_init_1 (&union_set
, next_node
);
3243 if (__glibc_unlikely (err
!= REG_NOERROR
))
3246 mctx
->state_log
[to_idx
] = re_acquire_state (&err
, dfa
, &union_set
);
3247 re_node_set_free (&union_set
);
3248 if (__glibc_unlikely (mctx
->state_log
[to_idx
] == NULL
3249 && err
!= REG_NOERROR
))
3253 while (ent
++->more
);
3257 /* Build transition table for the state.
3258 Return true if successful. */
3260 static bool __attribute_noinline__
3261 build_trtable (const re_dfa_t
*dfa
, re_dfastate_t
*state
)
3266 bool need_word_trtable
= false;
3267 bitset_word_t elem
, mask
;
3268 Idx ndests
; /* Number of the destination states from 'state'. */
3269 re_dfastate_t
**trtable
;
3270 re_dfastate_t
*dest_states
[SBC_MAX
];
3271 re_dfastate_t
*dest_states_word
[SBC_MAX
];
3272 re_dfastate_t
*dest_states_nl
[SBC_MAX
];
3273 re_node_set follows
;
3274 bitset_t acceptable
;
/* We build DFA states which correspond to the destination nodes
   from 'state'.  'dests_node[i]' represents the nodes which the i-th
   destination state contains, and 'dests_ch[i]' represents the
   characters which the i-th destination state accepts. */
3280 re_node_set dests_node
[SBC_MAX
];
3281 bitset_t dests_ch
[SBC_MAX
];
3283 /* Initialize transition table. */
3284 state
->word_trtable
= state
->trtable
= NULL
;
/* At first, group all nodes belonging to 'state' into several
   destinations. */
3288 ndests
= group_nodes_into_DFAstates (dfa
, state
, dests_node
, dests_ch
);
3289 if (__glibc_unlikely (ndests
<= 0))
3291 /* Return false in case of an error, true otherwise. */
3294 state
->trtable
= (re_dfastate_t
**)
3295 calloc (sizeof (re_dfastate_t
*), SBC_MAX
);
3296 if (__glibc_unlikely (state
->trtable
== NULL
))
3303 err
= re_node_set_alloc (&follows
, ndests
+ 1);
3304 if (__glibc_unlikely (err
!= REG_NOERROR
))
3307 re_node_set_free (&follows
);
3308 for (i
= 0; i
< ndests
; ++i
)
3309 re_node_set_free (dests_node
+ i
);
3313 bitset_empty (acceptable
);
3315 /* Then build the states for all destinations. */
3316 for (i
= 0; i
< ndests
; ++i
)
3319 re_node_set_empty (&follows
);
3320 /* Merge the follows of this destination states. */
3321 for (j
= 0; j
< dests_node
[i
].nelem
; ++j
)
3323 next_node
= dfa
->nexts
[dests_node
[i
].elems
[j
]];
3324 if (next_node
!= -1)
3326 err
= re_node_set_merge (&follows
, dfa
->eclosures
+ next_node
);
3327 if (__glibc_unlikely (err
!= REG_NOERROR
))
3331 dest_states
[i
] = re_acquire_state_context (&err
, dfa
, &follows
, 0);
3332 if (__glibc_unlikely (dest_states
[i
] == NULL
&& err
!= REG_NOERROR
))
3334 /* If the new state has context constraint,
3335 build appropriate states for these contexts. */
3336 if (dest_states
[i
]->has_constraint
)
3338 dest_states_word
[i
] = re_acquire_state_context (&err
, dfa
, &follows
,
3340 if (__glibc_unlikely (dest_states_word
[i
] == NULL
3341 && err
!= REG_NOERROR
))
3344 if (dest_states
[i
] != dest_states_word
[i
] && dfa
->mb_cur_max
> 1)
3345 need_word_trtable
= true;
3347 dest_states_nl
[i
] = re_acquire_state_context (&err
, dfa
, &follows
,
3349 if (__glibc_unlikely (dest_states_nl
[i
] == NULL
&& err
!= REG_NOERROR
))
3354 dest_states_word
[i
] = dest_states
[i
];
3355 dest_states_nl
[i
] = dest_states
[i
];
3357 bitset_merge (acceptable
, dests_ch
[i
]);
3360 if (!__glibc_unlikely (need_word_trtable
))
3362 /* We don't care about whether the following character is a word
3363 character, or we are in a single-byte character set so we can
3364 discern by looking at the character code: allocate a
3365 256-entry transition table. */
3366 trtable
= state
->trtable
=
3367 (re_dfastate_t
**) calloc (sizeof (re_dfastate_t
*), SBC_MAX
);
3368 if (__glibc_unlikely (trtable
== NULL
))
3371 /* For all characters ch...: */
3372 for (i
= 0; i
< BITSET_WORDS
; ++i
)
3373 for (ch
= i
* BITSET_WORD_BITS
, elem
= acceptable
[i
], mask
= 1;
3375 mask
<<= 1, elem
>>= 1, ++ch
)
3376 if (__glibc_unlikely (elem
& 1))
3378 /* There must be exactly one destination which accepts
3379 character ch. See group_nodes_into_DFAstates. */
3380 for (j
= 0; (dests_ch
[j
][i
] & mask
) == 0; ++j
)
3383 /* j-th destination accepts the word character ch. */
3384 if (dfa
->word_char
[i
] & mask
)
3385 trtable
[ch
] = dest_states_word
[j
];
3387 trtable
[ch
] = dest_states
[j
];
3392 /* We care about whether the following character is a word
3393 character, and we are in a multi-byte character set: discern
3394 by looking at the character code: build two 256-entry
3395 transition tables, one starting at trtable[0] and one
3396 starting at trtable[SBC_MAX]. */
3397 trtable
= state
->word_trtable
=
3398 (re_dfastate_t
**) calloc (sizeof (re_dfastate_t
*), 2 * SBC_MAX
);
3399 if (__glibc_unlikely (trtable
== NULL
))
3402 /* For all characters ch...: */
3403 for (i
= 0; i
< BITSET_WORDS
; ++i
)
3404 for (ch
= i
* BITSET_WORD_BITS
, elem
= acceptable
[i
], mask
= 1;
3406 mask
<<= 1, elem
>>= 1, ++ch
)
3407 if (__glibc_unlikely (elem
& 1))
3409 /* There must be exactly one destination which accepts
3410 character ch. See group_nodes_into_DFAstates. */
3411 for (j
= 0; (dests_ch
[j
][i
] & mask
) == 0; ++j
)
3414 /* j-th destination accepts the word character ch. */
3415 trtable
[ch
] = dest_states
[j
];
3416 trtable
[ch
+ SBC_MAX
] = dest_states_word
[j
];
3421 if (bitset_contain (acceptable
, NEWLINE_CHAR
))
3423 /* The current state accepts newline character. */
3424 for (j
= 0; j
< ndests
; ++j
)
3425 if (bitset_contain (dests_ch
[j
], NEWLINE_CHAR
))
/* The j-th destination accepts the newline character. */
3428 trtable
[NEWLINE_CHAR
] = dest_states_nl
[j
];
3429 if (need_word_trtable
)
3430 trtable
[NEWLINE_CHAR
+ SBC_MAX
] = dest_states_nl
[j
];
3431 /* There must be only one destination which accepts
3432 newline. See group_nodes_into_DFAstates. */
3437 re_node_set_free (&follows
);
3438 for (i
= 0; i
< ndests
; ++i
)
3439 re_node_set_free (dests_node
+ i
);
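/* build_trtable () above enumerates every accepted character by walking
   each bitset word and testing one bit at a time.  A standalone sketch of
   that walk over a plain word array follows; the word type and sizes are
   hypothetical stand-ins for bitset_word_t and BITSET_WORDS.  */
#if 0
#include <limits.h>
#include <stdio.h>

#define TOY_WORD_BITS (sizeof (unsigned long) * CHAR_BIT)
#define TOY_WORDS (256 / TOY_WORD_BITS)

static void
toy_for_each_set_bit (const unsigned long *words)
{
  for (unsigned int i = 0; i < TOY_WORDS; i++)
    {
      unsigned long elem = words[i];
      /* Shift ELEM right until no set bits remain.  */
      for (unsigned int ch = i * TOY_WORD_BITS; elem; elem >>= 1, ch++)
        if (elem & 1)
          printf ("accepts character %u\n", ch);
    }
}
#endif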
3443 /* Group all nodes belonging to STATE into several destinations.
3444 Then for all destinations, set the nodes belonging to the destination
3445 to DESTS_NODE[i] and set the characters accepted by the destination
3446 to DEST_CH[i]. Return the number of destinations if successful,
3447 -1 on internal error. */
3450 group_nodes_into_DFAstates (const re_dfa_t
*dfa
, const re_dfastate_t
*state
,
3451 re_node_set
*dests_node
, bitset_t
*dests_ch
)
3456 Idx ndests
; /* Number of the destinations from 'state'. */
3457 bitset_t accepts
; /* Characters a node can accept. */
3458 const re_node_set
*cur_nodes
= &state
->nodes
;
3459 bitset_empty (accepts
);
3462 /* For all the nodes belonging to 'state', */
3463 for (i
= 0; i
< cur_nodes
->nelem
; ++i
)
3465 re_token_t
*node
= &dfa
->nodes
[cur_nodes
->elems
[i
]];
3466 re_token_type_t type
= node
->type
;
3467 unsigned int constraint
= node
->constraint
;
/* Enumerate all single byte characters this node can accept. */
3470 if (type
== CHARACTER
)
3471 bitset_set (accepts
, node
->opr
.c
);
3472 else if (type
== SIMPLE_BRACKET
)
3474 bitset_merge (accepts
, node
->opr
.sbcset
);
3476 else if (type
== OP_PERIOD
)
3478 #ifdef RE_ENABLE_I18N
3479 if (dfa
->mb_cur_max
> 1)
3480 bitset_merge (accepts
, dfa
->sb_char
);
3483 bitset_set_all (accepts
);
3484 if (!(dfa
->syntax
& RE_DOT_NEWLINE
))
3485 bitset_clear (accepts
, '\n');
3486 if (dfa
->syntax
& RE_DOT_NOT_NULL
)
3487 bitset_clear (accepts
, '\0');
3489 #ifdef RE_ENABLE_I18N
3490 else if (type
== OP_UTF8_PERIOD
)
3492 if (ASCII_CHARS
% BITSET_WORD_BITS
== 0)
3493 memset (accepts
, -1, ASCII_CHARS
/ CHAR_BIT
);
3495 bitset_merge (accepts
, utf8_sb_map
);
3496 if (!(dfa
->syntax
& RE_DOT_NEWLINE
))
3497 bitset_clear (accepts
, '\n');
3498 if (dfa
->syntax
& RE_DOT_NOT_NULL
)
3499 bitset_clear (accepts
, '\0');
/* Check 'accepts' and sift out the characters which do not
   match the context. */
3509 if (constraint
& NEXT_NEWLINE_CONSTRAINT
)
3511 bool accepts_newline
= bitset_contain (accepts
, NEWLINE_CHAR
);
3512 bitset_empty (accepts
);
3513 if (accepts_newline
)
3514 bitset_set (accepts
, NEWLINE_CHAR
);
3518 if (constraint
& NEXT_ENDBUF_CONSTRAINT
)
3520 bitset_empty (accepts
);
3524 if (constraint
& NEXT_WORD_CONSTRAINT
)
3526 bitset_word_t any_set
= 0;
3527 if (type
== CHARACTER
&& !node
->word_char
)
3529 bitset_empty (accepts
);
3532 #ifdef RE_ENABLE_I18N
3533 if (dfa
->mb_cur_max
> 1)
3534 for (j
= 0; j
< BITSET_WORDS
; ++j
)
3535 any_set
|= (accepts
[j
] &= (dfa
->word_char
[j
] | ~dfa
->sb_char
[j
]));
3538 for (j
= 0; j
< BITSET_WORDS
; ++j
)
3539 any_set
|= (accepts
[j
] &= dfa
->word_char
[j
]);
3543 if (constraint
& NEXT_NOTWORD_CONSTRAINT
)
3545 bitset_word_t any_set
= 0;
3546 if (type
== CHARACTER
&& node
->word_char
)
3548 bitset_empty (accepts
);
3551 #ifdef RE_ENABLE_I18N
3552 if (dfa
->mb_cur_max
> 1)
3553 for (j
= 0; j
< BITSET_WORDS
; ++j
)
3554 any_set
|= (accepts
[j
] &= ~(dfa
->word_char
[j
] & dfa
->sb_char
[j
]));
3557 for (j
= 0; j
< BITSET_WORDS
; ++j
)
3558 any_set
|= (accepts
[j
] &= ~dfa
->word_char
[j
]);
3564 /* Then divide 'accepts' into DFA states, or create a new
3565 state. Above, we make sure that accepts is not empty. */
3566 for (j
= 0; j
< ndests
; ++j
)
3568 bitset_t intersec
; /* Intersection sets, see below. */
3570 /* Flags, see below. */
3571 bitset_word_t has_intersec
, not_subset
, not_consumed
;
3573 /* Optimization, skip if this state doesn't accept the character. */
3574 if (type
== CHARACTER
&& !bitset_contain (dests_ch
[j
], node
->opr
.c
))
3577 /* Enumerate the intersection set of this state and 'accepts'. */
3579 for (k
= 0; k
< BITSET_WORDS
; ++k
)
3580 has_intersec
|= intersec
[k
] = accepts
[k
] & dests_ch
[j
][k
];
3581 /* And skip if the intersection set is empty. */
3585 /* Then check if this state is a subset of 'accepts'. */
3586 not_subset
= not_consumed
= 0;
3587 for (k
= 0; k
< BITSET_WORDS
; ++k
)
3589 not_subset
|= remains
[k
] = ~accepts
[k
] & dests_ch
[j
][k
];
3590 not_consumed
|= accepts
[k
] = accepts
[k
] & ~dests_ch
[j
][k
];
3593 /* If this state isn't a subset of 'accepts', create a
3594 new group state, which has the 'remains'. */
3597 bitset_copy (dests_ch
[ndests
], remains
);
3598 bitset_copy (dests_ch
[j
], intersec
);
3599 err
= re_node_set_init_copy (dests_node
+ ndests
, &dests_node
[j
]);
3600 if (__glibc_unlikely (err
!= REG_NOERROR
))
3605 /* Put the position in the current group. */
3606 ok
= re_node_set_insert (&dests_node
[j
], cur_nodes
->elems
[i
]);
3607 if (__glibc_unlikely (! ok
))
3610 /* If all characters are consumed, go to next node. */
3614 /* Some characters remain, create a new group. */
3617 bitset_copy (dests_ch
[ndests
], accepts
);
3618 err
= re_node_set_init_1 (dests_node
+ ndests
, cur_nodes
->elems
[i
]);
3619 if (__glibc_unlikely (err
!= REG_NOERROR
))
3622 bitset_empty (accepts
);
3625 assume (ndests
<= SBC_MAX
);
3628 for (j
= 0; j
< ndests
; ++j
)
3629 re_node_set_free (dests_node
+ j
);
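/* group_nodes_into_DFAstates () above splits the accepted-character set
   against each existing destination set: the intersection, the part of the
   destination not covered ("remains"), and the part of 'accepts' not yet
   consumed.  A standalone sketch of that three-way split on single plain
   words follows; the toy_* names are hypothetical.  */
#if 0
struct toy_split
{
  unsigned long intersec;	/* in both 'accepts' and the destination */
  unsigned long remains;	/* in the destination but not in 'accepts' */
  unsigned long leftover;	/* in 'accepts' but not in the destination */
};

static struct toy_split
toy_split_sets (unsigned long accepts, unsigned long dests)
{
  struct toy_split s;
  s.intersec = accepts & dests;
  s.remains = ~accepts & dests;
  s.leftover = accepts & ~dests;
  return s;
}
#endif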
3633 #ifdef RE_ENABLE_I18N
/* Check how many bytes the node 'dfa->nodes[node_idx]' accepts.
   Return the number of bytes the node accepts.
   STR_IDX is the current index of the input string.

   This function handles the nodes which can accept one character, or
   one collating element like '.' or '[a-z]', unlike the other nodes,
   which can only accept one byte. */
3643 # include <locale/weight.h>
3647 check_node_accept_bytes (const re_dfa_t
*dfa
, Idx node_idx
,
3648 const re_string_t
*input
, Idx str_idx
)
3650 const re_token_t
*node
= dfa
->nodes
+ node_idx
;
3651 int char_len
, elem_len
;
3654 if (__glibc_unlikely (node
->type
== OP_UTF8_PERIOD
))
3656 unsigned char c
= re_string_byte_at (input
, str_idx
), d
;
3657 if (__glibc_likely (c
< 0xc2))
3660 if (str_idx
+ 2 > input
->len
)
3663 d
= re_string_byte_at (input
, str_idx
+ 1);
3665 return (d
< 0x80 || d
> 0xbf) ? 0 : 2;
3669 if (c
== 0xe0 && d
< 0xa0)
3675 if (c
== 0xf0 && d
< 0x90)
3681 if (c
== 0xf8 && d
< 0x88)
3687 if (c
== 0xfc && d
< 0x84)
3693 if (str_idx
+ char_len
> input
->len
)
3696 for (i
= 1; i
< char_len
; ++i
)
3698 d
= re_string_byte_at (input
, str_idx
+ i
);
3699 if (d
< 0x80 || d
> 0xbf)
3705 char_len
= re_string_char_size_at (input
, str_idx
);
3706 if (node
->type
== OP_PERIOD
)
3710 /* FIXME: I don't think this if is needed, as both '\n'
3711 and '\0' are char_len == 1. */
3712 /* '.' accepts any one character except the following two cases. */
3713 if ((!(dfa
->syntax
& RE_DOT_NEWLINE
)
3714 && re_string_byte_at (input
, str_idx
) == '\n')
3715 || ((dfa
->syntax
& RE_DOT_NOT_NULL
)
3716 && re_string_byte_at (input
, str_idx
) == '\0'))
3721 elem_len
= re_string_elem_size_at (input
, str_idx
);
3722 if ((elem_len
<= 1 && char_len
<= 1) || char_len
== 0)
3725 if (node
->type
== COMPLEX_BRACKET
)
3727 const re_charset_t
*cset
= node
->opr
.mbcset
;
3729 const unsigned char *pin
3730 = ((const unsigned char *) re_string_get_buffer (input
) + str_idx
);
3735 wchar_t wc
= ((cset
->nranges
|| cset
->nchar_classes
|| cset
->nmbchars
)
3736 ? re_string_wchar_at (input
, str_idx
) : 0);
3738 /* match with multibyte character? */
3739 for (i
= 0; i
< cset
->nmbchars
; ++i
)
3740 if (wc
== cset
->mbchars
[i
])
3742 match_len
= char_len
;
3743 goto check_node_accept_bytes_match
;
3745 /* match with character_class? */
3746 for (i
= 0; i
< cset
->nchar_classes
; ++i
)
3748 wctype_t wt
= cset
->char_classes
[i
];
3749 if (__iswctype (wc
, wt
))
3751 match_len
= char_len
;
3752 goto check_node_accept_bytes_match
;
3757 nrules
= _NL_CURRENT_WORD (LC_COLLATE
, _NL_COLLATE_NRULES
);
3760 unsigned int in_collseq
= 0;
3761 const int32_t *table
, *indirect
;
3762 const unsigned char *weights
, *extra
;
3763 const char *collseqwc
;
3765 /* match with collating_symbol? */
3766 if (cset
->ncoll_syms
)
3767 extra
= (const unsigned char *)
3768 _NL_CURRENT (LC_COLLATE
, _NL_COLLATE_SYMB_EXTRAMB
);
3769 for (i
= 0; i
< cset
->ncoll_syms
; ++i
)
3771 const unsigned char *coll_sym
= extra
+ cset
->coll_syms
[i
];
3772 /* Compare the length of input collating element and
3773 the length of current collating element. */
3774 if (*coll_sym
!= elem_len
)
/* Compare each byte. */
3777 for (j
= 0; j
< *coll_sym
; j
++)
3778 if (pin
[j
] != coll_sym
[1 + j
])
/* Match if every byte is equal. */
3784 goto check_node_accept_bytes_match
;
3790 if (elem_len
<= char_len
)
3792 collseqwc
= _NL_CURRENT (LC_COLLATE
, _NL_COLLATE_COLLSEQWC
);
3793 in_collseq
= __collseq_table_lookup (collseqwc
, wc
);
3796 in_collseq
= find_collation_sequence_value (pin
, elem_len
);
3798 /* match with range expression? */
3799 /* FIXME: Implement rational ranges here, too. */
3800 for (i
= 0; i
< cset
->nranges
; ++i
)
3801 if (cset
->range_starts
[i
] <= in_collseq
3802 && in_collseq
<= cset
->range_ends
[i
])
3804 match_len
= elem_len
;
3805 goto check_node_accept_bytes_match
;
3808 /* match with equivalence_class? */
3809 if (cset
->nequiv_classes
)
3811 const unsigned char *cp
= pin
;
3812 table
= (const int32_t *)
3813 _NL_CURRENT (LC_COLLATE
, _NL_COLLATE_TABLEMB
);
3814 weights
= (const unsigned char *)
3815 _NL_CURRENT (LC_COLLATE
, _NL_COLLATE_WEIGHTMB
);
3816 extra
= (const unsigned char *)
3817 _NL_CURRENT (LC_COLLATE
, _NL_COLLATE_EXTRAMB
);
3818 indirect
= (const int32_t *)
3819 _NL_CURRENT (LC_COLLATE
, _NL_COLLATE_INDIRECTMB
);
3820 int32_t idx
= findidx (table
, indirect
, extra
, &cp
, elem_len
);
3821 int32_t rule
= idx
>> 24;
3825 size_t weight_len
= weights
[idx
];
3826 for (i
= 0; i
< cset
->nequiv_classes
; ++i
)
3828 int32_t equiv_class_idx
= cset
->equiv_classes
[i
];
3829 int32_t equiv_class_rule
= equiv_class_idx
>> 24;
3830 equiv_class_idx
&= 0xffffff;
3831 if (weights
[equiv_class_idx
] == weight_len
3832 && equiv_class_rule
== rule
3833 && memcmp (weights
+ idx
+ 1,
3834 weights
+ equiv_class_idx
+ 1,
3837 match_len
= elem_len
;
3838 goto check_node_accept_bytes_match
;
3847 /* match with range expression? */
3848 for (i
= 0; i
< cset
->nranges
; ++i
)
3850 if (cset
->range_starts
[i
] <= wc
&& wc
<= cset
->range_ends
[i
])
3852 match_len
= char_len
;
3853 goto check_node_accept_bytes_match
;
3857 check_node_accept_bytes_match
:
3858 if (!cset
->non_match
)
3865 return (elem_len
> char_len
) ? elem_len
: char_len
;
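/* The OP_UTF8_PERIOD branch above derives the byte length of a character
   from its lead byte and then checks that every continuation byte lies in
   0x80..0xbf.  A standalone sketch of that classification, restricted to
   modern four-byte-maximum UTF-8, follows; it is an illustration, not this
   library's code.  */
#if 0
/* Return the expected sequence length for lead byte C, or 0 if C cannot
   start a multibyte UTF-8 sequence (ASCII and continuation bytes also
   return 0 here).  */
static int
toy_utf8_seq_len (unsigned char c)
{
  if (c < 0xc2)			/* ASCII, continuation, or overlong lead */
    return 0;
  if (c < 0xe0)
    return 2;
  if (c < 0xf0)
    return 3;
  if (c < 0xf5)
    return 4;
  return 0;			/* 0xf5..0xff never start a valid sequence */
}

/* Return nonzero if D is a valid continuation byte.  */
static int
toy_is_continuation (unsigned char d)
{
  return d >= 0x80 && d <= 0xbf;
}
#endif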
3873 find_collation_sequence_value (const unsigned char *mbs
, size_t mbs_len
)
3875 uint32_t nrules
= _NL_CURRENT_WORD (LC_COLLATE
, _NL_COLLATE_NRULES
);
3880 /* No valid character. Match it as a single byte character. */
3881 const unsigned char *collseq
= (const unsigned char *)
3882 _NL_CURRENT (LC_COLLATE
, _NL_COLLATE_COLLSEQMB
);
3883 return collseq
[mbs
[0]];
3890 const unsigned char *extra
= (const unsigned char *)
3891 _NL_CURRENT (LC_COLLATE
, _NL_COLLATE_SYMB_EXTRAMB
);
3892 int32_t extrasize
= (const unsigned char *)
3893 _NL_CURRENT (LC_COLLATE
, _NL_COLLATE_SYMB_EXTRAMB
+ 1) - extra
;
3895 for (idx
= 0; idx
< extrasize
;)
3899 int32_t elem_mbs_len
;
/* Skip the name of the collating element. */
3901 idx
= idx
+ extra
[idx
] + 1;
3902 elem_mbs_len
= extra
[idx
++];
3903 if (mbs_len
== elem_mbs_len
)
3905 for (mbs_cnt
= 0; mbs_cnt
< elem_mbs_len
; ++mbs_cnt
)
3906 if (extra
[idx
+ mbs_cnt
] != mbs
[mbs_cnt
])
3908 if (mbs_cnt
== elem_mbs_len
)
3909 /* Found the entry. */
3912 /* Skip the byte sequence of the collating element. */
3913 idx
+= elem_mbs_len
;
3914 /* Adjust for the alignment. */
3915 idx
= (idx
+ 3) & ~3;
3916 /* Skip the collation sequence value. */
3917 idx
+= sizeof (uint32_t);
3918 /* Skip the wide char sequence of the collating element. */
3919 idx
= idx
+ sizeof (uint32_t) * (*(int32_t *) (extra
+ idx
) + 1);
3920 /* If we found the entry, return the sequence value. */
3922 return *(uint32_t *) (extra
+ idx
);
3923 /* Skip the collation sequence value. */
3924 idx
+= sizeof (uint32_t);
3930 #endif /* RE_ENABLE_I18N */
/* Check whether the node accepts the byte which is the IDX-th
   byte of the INPUT. */
3936 check_node_accept (const re_match_context_t
*mctx
, const re_token_t
*node
,
3940 ch
= re_string_byte_at (&mctx
->input
, idx
);
3944 if (node
->opr
.c
!= ch
)
3948 case SIMPLE_BRACKET
:
3949 if (!bitset_contain (node
->opr
.sbcset
, ch
))
3953 #ifdef RE_ENABLE_I18N
3954 case OP_UTF8_PERIOD
:
3955 if (ch
>= ASCII_CHARS
)
3960 if ((ch
== '\n' && !(mctx
->dfa
->syntax
& RE_DOT_NEWLINE
))
3961 || (ch
== '\0' && (mctx
->dfa
->syntax
& RE_DOT_NOT_NULL
)))
3969 if (node
->constraint
)
3971 /* The node has constraints. Check whether the current context
3972 satisfies the constraints. */
3973 unsigned int context
= re_string_context_at (&mctx
->input
, idx
,
3975 if (NOT_SATISFY_NEXT_CONSTRAINT (node
->constraint
, context
))
3982 /* Extend the buffers, if the buffers have run out. */
3984 static reg_errcode_t
3985 __attribute_warn_unused_result__
3986 extend_buffers (re_match_context_t
*mctx
, int min_len
)
3989 re_string_t
*pstr
= &mctx
->input
;
3991 /* Avoid overflow. */
3992 if (__glibc_unlikely (MIN (IDX_MAX
, SIZE_MAX
/ sizeof (re_dfastate_t
*)) / 2
3996 /* Double the lengths of the buffers, but allocate at least MIN_LEN. */
3997 ret
= re_string_realloc_buffers (pstr
,
3999 MIN (pstr
->len
, pstr
->bufs_len
* 2)));
4000 if (__glibc_unlikely (ret
!= REG_NOERROR
))
4003 if (mctx
->state_log
!= NULL
)
4005 /* And double the length of state_log. */
/* XXX We have no indication of the size of this buffer.  If this
   allocation fails we have no indication that the state_log array
   does not have the right size. */
4009 re_dfastate_t
**new_array
= re_realloc (mctx
->state_log
, re_dfastate_t
*,
4010 pstr
->bufs_len
+ 1);
4011 if (__glibc_unlikely (new_array
== NULL
))
4013 mctx
->state_log
= new_array
;
4016 /* Then reconstruct the buffers. */
4019 #ifdef RE_ENABLE_I18N
4020 if (pstr
->mb_cur_max
> 1)
4022 ret
= build_wcs_upper_buffer (pstr
);
4023 if (__glibc_unlikely (ret
!= REG_NOERROR
))
4027 #endif /* RE_ENABLE_I18N */
4028 build_upper_buffer (pstr
);
4032 #ifdef RE_ENABLE_I18N
4033 if (pstr
->mb_cur_max
> 1)
4034 build_wcs_buffer (pstr
);
4036 #endif /* RE_ENABLE_I18N */
4038 if (pstr
->trans
!= NULL
)
4039 re_string_translate_buffer (pstr
);
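/* extend_buffers () grows the input buffers geometrically, but only after
   checking that the doubling cannot overflow.  A standalone sketch of a
   guarded size computation in the same spirit follows; the toy_* names and
   the exact policy are hypothetical, not a copy of this function.  */
#if 0
#include <stdbool.h>
#include <stddef.h>

/* Compute a new allocation size: double CUR, but at least MIN_LEN and
   never beyond MAX_LEN.  Return false if MIN_LEN itself cannot fit.  */
static bool
toy_grow_size (size_t cur, size_t min_len, size_t max_len, size_t *out)
{
  if (min_len > max_len)
    return false;
  if (cur > max_len / 2)
    *out = max_len;		/* doubling would exceed the cap */
  else
    *out = cur * 2 < min_len ? min_len : cur * 2;
  return true;
}
#endif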
4046 /* Functions for matching context. */
4048 /* Initialize MCTX. */
4050 static reg_errcode_t
4051 __attribute_warn_unused_result__
4052 match_ctx_init (re_match_context_t
*mctx
, int eflags
, Idx n
)
4054 mctx
->eflags
= eflags
;
4055 mctx
->match_last
= -1;
4058 /* Avoid overflow. */
4059 size_t max_object_size
=
4060 MAX (sizeof (struct re_backref_cache_entry
),
4061 sizeof (re_sub_match_top_t
*));
4062 if (__glibc_unlikely (MIN (IDX_MAX
, SIZE_MAX
/ max_object_size
) < n
))
4065 mctx
->bkref_ents
= re_malloc (struct re_backref_cache_entry
, n
);
4066 mctx
->sub_tops
= re_malloc (re_sub_match_top_t
*, n
);
4067 if (__glibc_unlikely (mctx
->bkref_ents
== NULL
|| mctx
->sub_tops
== NULL
))
4070 /* Already zero-ed by the caller.
4072 mctx->bkref_ents = NULL;
4073 mctx->nbkref_ents = 0;
4074 mctx->nsub_tops = 0; */
4075 mctx
->abkref_ents
= n
;
4076 mctx
->max_mb_elem_len
= 1;
4077 mctx
->asub_tops
= n
;
4081 /* Clean the entries which depend on the current input in MCTX.
4082 This function must be invoked when the matcher changes the start index
4083 of the input, or changes the input string. */
4086 match_ctx_clean (re_match_context_t
*mctx
)
4089 for (st_idx
= 0; st_idx
< mctx
->nsub_tops
; ++st_idx
)
4092 re_sub_match_top_t
*top
= mctx
->sub_tops
[st_idx
];
4093 for (sl_idx
= 0; sl_idx
< top
->nlasts
; ++sl_idx
)
4095 re_sub_match_last_t
*last
= top
->lasts
[sl_idx
];
4096 re_free (last
->path
.array
);
4099 re_free (top
->lasts
);
4102 re_free (top
->path
->array
);
4103 re_free (top
->path
);
4108 mctx
->nsub_tops
= 0;
4109 mctx
->nbkref_ents
= 0;
/* Free all the memory associated with MCTX. */

static void
match_ctx_free (re_match_context_t *mctx)
{
  /* First, free all the memory associated with MCTX->SUB_TOPS. */
  match_ctx_clean (mctx);
  re_free (mctx->sub_tops);
  re_free (mctx->bkref_ents);
}
/* Add a new backreference entry to MCTX.
   Note that we assume that the caller never calls this function with a
   duplicate entry, and calls with a STR_IDX which isn't smaller than any
   existing entry. */
4128 static reg_errcode_t
4129 __attribute_warn_unused_result__
4130 match_ctx_add_entry (re_match_context_t
*mctx
, Idx node
, Idx str_idx
, Idx from
,
4133 if (mctx
->nbkref_ents
>= mctx
->abkref_ents
)
4135 struct re_backref_cache_entry
* new_entry
;
4136 new_entry
= re_realloc (mctx
->bkref_ents
, struct re_backref_cache_entry
,
4137 mctx
->abkref_ents
* 2);
4138 if (__glibc_unlikely (new_entry
== NULL
))
4140 re_free (mctx
->bkref_ents
);
4143 mctx
->bkref_ents
= new_entry
;
4144 memset (mctx
->bkref_ents
+ mctx
->nbkref_ents
, '\0',
4145 sizeof (struct re_backref_cache_entry
) * mctx
->abkref_ents
);
4146 mctx
->abkref_ents
*= 2;
4148 if (mctx
->nbkref_ents
> 0
4149 && mctx
->bkref_ents
[mctx
->nbkref_ents
- 1].str_idx
== str_idx
)
4150 mctx
->bkref_ents
[mctx
->nbkref_ents
- 1].more
= 1;
4152 mctx
->bkref_ents
[mctx
->nbkref_ents
].node
= node
;
4153 mctx
->bkref_ents
[mctx
->nbkref_ents
].str_idx
= str_idx
;
4154 mctx
->bkref_ents
[mctx
->nbkref_ents
].subexp_from
= from
;
4155 mctx
->bkref_ents
[mctx
->nbkref_ents
].subexp_to
= to
;
/* This is a cache that saves negative results of check_dst_limits_calc_pos.
   If bit N is clear, it means that this entry won't epsilon-transition to
   an OP_OPEN_SUBEXP or OP_CLOSE_SUBEXP for the (N+1)-th subexpression.  If
   it is set, check_dst_limits_calc_pos_1 will recurse and try to find one
   such node.

   A backreference does not epsilon-transition unless it is empty, so set
   to all zeros if FROM != TO. */
4165 mctx
->bkref_ents
[mctx
->nbkref_ents
].eps_reachable_subexps_map
4166 = (from
== to
? -1 : 0);
4168 mctx
->bkref_ents
[mctx
->nbkref_ents
++].more
= 0;
4169 if (mctx
->max_mb_elem_len
< to
- from
)
4170 mctx
->max_mb_elem_len
= to
- from
;
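/* The eps_reachable_subexps_map field set above is a per-entry bitmask: bit
   N stands for the (N+1)-th subexpression, it is tested before recursing and
   cleared once the search for that subexpression has come back negative.  A
   standalone sketch of the test-and-clear pattern on a plain word follows;
   the toy_* names are hypothetical.  */
#if 0
#include <stdbool.h>

#define TOY_WORD_BITS (8 * sizeof (unsigned long))

/* Return true if SUBEXP_IDX may still be reachable according to MAP.
   Indexes that do not fit in the word are conservatively kept reachable.  */
static bool
toy_maybe_reachable (unsigned long map, unsigned int subexp_idx)
{
  return subexp_idx >= TOY_WORD_BITS
	 || (map & (1UL << subexp_idx)) != 0;
}

/* Record a negative result: clear the bit for SUBEXP_IDX if it fits.  */
static void
toy_mark_unreachable (unsigned long *map, unsigned int subexp_idx)
{
  if (subexp_idx < TOY_WORD_BITS)
    *map &= ~(1UL << subexp_idx);
}
#endif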
/* Return the first entry with the same str_idx, or -1 if none is
   found.  Note that MCTX->BKREF_ENTS is already sorted by MCTX->STR_IDX. */

static Idx
search_cur_bkref_entry (const re_match_context_t *mctx, Idx str_idx)
{
  Idx left, right, mid, last;
  last = right = mctx->nbkref_ents;
  for (left = 0; left < right;)
    {
      mid = (left + right) / 2;
      if (mctx->bkref_ents[mid].str_idx < str_idx)
        left = mid + 1;
      else
        right = mid;
    }
  if (left < last && mctx->bkref_ents[left].str_idx == str_idx)
    return left;
  else
    return -1;
}
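/* search_cur_bkref_entry () above is a lower-bound binary search over
   entries sorted by str_idx.  A standalone sketch of the same search on a
   plain sorted array of integers follows; it is an illustration only.  */
#if 0
#include <stddef.h>
#include <stdio.h>

/* Return the index of the first element equal to KEY in the sorted array
   A[0..N), or -1 if KEY does not occur.  */
static ptrdiff_t
toy_lower_bound_find (const int *a, ptrdiff_t n, int key)
{
  ptrdiff_t left = 0, right = n;
  while (left < right)
    {
      ptrdiff_t mid = (left + right) / 2;
      if (a[mid] < key)
        left = mid + 1;
      else
        right = mid;
    }
  return (left < n && a[left] == key) ? left : -1;
}

int
main (void)
{
  int str_idx[] = { 0, 0, 2, 5, 5, 5, 9 };
  /* Prints 3: the first entry whose value equals 5.  */
  printf ("%td\n", toy_lower_bound_find (str_idx, 7, 5));
  return 0;
}
#endif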
/* Register the node NODE, whose type is OP_OPEN_SUBEXP, and which matches
   at STR_IDX. */
4199 static reg_errcode_t
4200 __attribute_warn_unused_result__
4201 match_ctx_add_subtop (re_match_context_t
*mctx
, Idx node
, Idx str_idx
)
4203 DEBUG_ASSERT (mctx
->sub_tops
!= NULL
);
4204 DEBUG_ASSERT (mctx
->asub_tops
> 0);
4205 if (__glibc_unlikely (mctx
->nsub_tops
== mctx
->asub_tops
))
4207 Idx new_asub_tops
= mctx
->asub_tops
* 2;
4208 re_sub_match_top_t
**new_array
= re_realloc (mctx
->sub_tops
,
4209 re_sub_match_top_t
*,
4211 if (__glibc_unlikely (new_array
== NULL
))
4213 mctx
->sub_tops
= new_array
;
4214 mctx
->asub_tops
= new_asub_tops
;
4216 mctx
->sub_tops
[mctx
->nsub_tops
] = calloc (1, sizeof (re_sub_match_top_t
));
4217 if (__glibc_unlikely (mctx
->sub_tops
[mctx
->nsub_tops
] == NULL
))
4219 mctx
->sub_tops
[mctx
->nsub_tops
]->node
= node
;
4220 mctx
->sub_tops
[mctx
->nsub_tops
++]->str_idx
= str_idx
;
4224 /* Register the node NODE, whose type is OP_CLOSE_SUBEXP, and which matches
4225 at STR_IDX, whose corresponding OP_OPEN_SUBEXP is SUB_TOP.
4226 Return the new entry if successful, NULL if memory is exhausted. */
4228 static re_sub_match_last_t
*
4229 match_ctx_add_sublast (re_sub_match_top_t
*subtop
, Idx node
, Idx str_idx
)
4231 re_sub_match_last_t
*new_entry
;
4232 if (__glibc_unlikely (subtop
->nlasts
== subtop
->alasts
))
4234 Idx new_alasts
= 2 * subtop
->alasts
+ 1;
4235 re_sub_match_last_t
**new_array
= re_realloc (subtop
->lasts
,
4236 re_sub_match_last_t
*,
4238 if (__glibc_unlikely (new_array
== NULL
))
4240 subtop
->lasts
= new_array
;
4241 subtop
->alasts
= new_alasts
;
4243 new_entry
= calloc (1, sizeof (re_sub_match_last_t
));
4244 if (__glibc_likely (new_entry
!= NULL
))
4246 subtop
->lasts
[subtop
->nlasts
] = new_entry
;
4247 new_entry
->node
= node
;
4248 new_entry
->str_idx
= str_idx
;
static void
sift_ctx_init (re_sift_context_t *sctx, re_dfastate_t **sifted_sts,
               re_dfastate_t **limited_sts, Idx last_node, Idx last_str_idx)
{
  sctx->sifted_states = sifted_sts;
  sctx->limited_states = limited_sts;
  sctx->last_node = last_node;
  sctx->last_str_idx = last_str_idx;
  re_node_set_init_empty (&sctx->limits);
}