PageRenderTime 77ms CodeModel.GetById 23ms RepoModel.GetById 0ms app.codeStats 1ms

/gnu/lib/libregex/regexec.c

https://bitbucket.org/freebsd/freebsd-head/
C | 4327 lines | 3462 code | 350 blank | 515 comment | 1042 complexity | 43689404ddbad7b8105fa2e37474affa MD5 | raw file
Possible License(s): MPL-2.0-no-copyleft-exception, BSD-3-Clause, LGPL-2.0, LGPL-2.1, BSD-2-Clause, 0BSD, JSON, AGPL-1.0, GPL-2.0
  1. /* Extended regular expression matching and search library.
  2. Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
  3. This file is part of the GNU C Library.
  4. Contributed by Isamu Hasegawa <isamu@yamato.ibm.com>.
  5. The GNU C Library is free software; you can redistribute it and/or
  6. modify it under the terms of the GNU Lesser General Public
  7. License as published by the Free Software Foundation; either
  8. version 2.1 of the License, or (at your option) any later version.
  9. The GNU C Library is distributed in the hope that it will be useful,
  10. but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. Lesser General Public License for more details.
  13. You should have received a copy of the GNU Lesser General Public
  14. License along with the GNU C Library; if not, write to the Free
  15. Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
  16. 02111-1307 USA. */
  17. static reg_errcode_t match_ctx_init (re_match_context_t *cache, int eflags,
  18. int n) internal_function;
  19. static void match_ctx_clean (re_match_context_t *mctx) internal_function;
  20. static void match_ctx_free (re_match_context_t *cache) internal_function;
  21. static reg_errcode_t match_ctx_add_entry (re_match_context_t *cache, int node,
  22. int str_idx, int from, int to)
  23. internal_function;
  24. static int search_cur_bkref_entry (re_match_context_t *mctx, int str_idx)
  25. internal_function;
  26. static reg_errcode_t match_ctx_add_subtop (re_match_context_t *mctx, int node,
  27. int str_idx) internal_function;
  28. static re_sub_match_last_t * match_ctx_add_sublast (re_sub_match_top_t *subtop,
  29. int node, int str_idx)
  30. internal_function;
  31. static void sift_ctx_init (re_sift_context_t *sctx, re_dfastate_t **sifted_sts,
  32. re_dfastate_t **limited_sts, int last_node,
  33. int last_str_idx)
  34. internal_function;
  35. static reg_errcode_t re_search_internal (const regex_t *preg,
  36. const char *string, int length,
  37. int start, int range, int stop,
  38. size_t nmatch, regmatch_t pmatch[],
  39. int eflags) internal_function;
  40. static int re_search_2_stub (struct re_pattern_buffer *bufp,
  41. const char *string1, int length1,
  42. const char *string2, int length2,
  43. int start, int range, struct re_registers *regs,
  44. int stop, int ret_len) internal_function;
  45. static int re_search_stub (struct re_pattern_buffer *bufp,
  46. const char *string, int length, int start,
  47. int range, int stop, struct re_registers *regs,
  48. int ret_len) internal_function;
  49. static unsigned re_copy_regs (struct re_registers *regs, regmatch_t *pmatch,
  50. int nregs, int regs_allocated) internal_function;
  51. static inline re_dfastate_t *acquire_init_state_context
  52. (reg_errcode_t *err, const re_match_context_t *mctx, int idx)
  53. __attribute ((always_inline)) internal_function;
  54. static reg_errcode_t prune_impossible_nodes (re_match_context_t *mctx)
  55. internal_function;
  56. static int check_matching (re_match_context_t *mctx, int fl_longest_match,
  57. int *p_match_first)
  58. internal_function;
  59. static int check_halt_node_context (const re_dfa_t *dfa, int node,
  60. unsigned int context) internal_function;
  61. static int check_halt_state_context (const re_match_context_t *mctx,
  62. const re_dfastate_t *state, int idx)
  63. internal_function;
  64. static void update_regs (re_dfa_t *dfa, regmatch_t *pmatch,
  65. regmatch_t *prev_idx_match, int cur_node,
  66. int cur_idx, int nmatch) internal_function;
  67. static int proceed_next_node (const re_match_context_t *mctx,
  68. int nregs, regmatch_t *regs,
  69. int *pidx, int node, re_node_set *eps_via_nodes,
  70. struct re_fail_stack_t *fs) internal_function;
  71. static reg_errcode_t push_fail_stack (struct re_fail_stack_t *fs,
  72. int str_idx, int dest_node, int nregs,
  73. regmatch_t *regs,
  74. re_node_set *eps_via_nodes) internal_function;
  75. static int pop_fail_stack (struct re_fail_stack_t *fs, int *pidx, int nregs,
  76. regmatch_t *regs, re_node_set *eps_via_nodes) internal_function;
  77. static reg_errcode_t set_regs (const regex_t *preg,
  78. const re_match_context_t *mctx,
  79. size_t nmatch, regmatch_t *pmatch,
  80. int fl_backtrack) internal_function;
  81. static reg_errcode_t free_fail_stack_return (struct re_fail_stack_t *fs) internal_function;
  82. #ifdef RE_ENABLE_I18N
  83. static int sift_states_iter_mb (const re_match_context_t *mctx,
  84. re_sift_context_t *sctx,
  85. int node_idx, int str_idx, int max_str_idx) internal_function;
  86. #endif /* RE_ENABLE_I18N */
  87. static reg_errcode_t sift_states_backward (re_match_context_t *mctx,
  88. re_sift_context_t *sctx) internal_function;
  89. static reg_errcode_t build_sifted_states (re_match_context_t *mctx,
  90. re_sift_context_t *sctx, int str_idx,
  91. re_node_set *cur_dest) internal_function;
  92. static reg_errcode_t update_cur_sifted_state (re_match_context_t *mctx,
  93. re_sift_context_t *sctx,
  94. int str_idx,
  95. re_node_set *dest_nodes) internal_function;
  96. static reg_errcode_t add_epsilon_src_nodes (re_dfa_t *dfa,
  97. re_node_set *dest_nodes,
  98. const re_node_set *candidates) internal_function;
  99. static reg_errcode_t sub_epsilon_src_nodes (re_dfa_t *dfa, int node,
  100. re_node_set *dest_nodes,
  101. const re_node_set *and_nodes) internal_function;
  102. static int check_dst_limits (re_match_context_t *mctx, re_node_set *limits,
  103. int dst_node, int dst_idx, int src_node,
  104. int src_idx) internal_function;
  105. static int check_dst_limits_calc_pos_1 (re_match_context_t *mctx,
  106. int boundaries, int subexp_idx,
  107. int from_node, int bkref_idx) internal_function;
  108. static int check_dst_limits_calc_pos (re_match_context_t *mctx,
  109. int limit, int subexp_idx,
  110. int node, int str_idx,
  111. int bkref_idx) internal_function;
  112. static reg_errcode_t check_subexp_limits (re_dfa_t *dfa,
  113. re_node_set *dest_nodes,
  114. const re_node_set *candidates,
  115. re_node_set *limits,
  116. struct re_backref_cache_entry *bkref_ents,
  117. int str_idx) internal_function;
  118. static reg_errcode_t sift_states_bkref (re_match_context_t *mctx,
  119. re_sift_context_t *sctx,
  120. int str_idx, const re_node_set *candidates) internal_function;
  121. static reg_errcode_t clean_state_log_if_needed (re_match_context_t *mctx,
  122. int next_state_log_idx) internal_function;
  123. static reg_errcode_t merge_state_array (re_dfa_t *dfa, re_dfastate_t **dst,
  124. re_dfastate_t **src, int num) internal_function;
  125. static re_dfastate_t *find_recover_state (reg_errcode_t *err,
  126. re_match_context_t *mctx) internal_function;
  127. static re_dfastate_t *transit_state (reg_errcode_t *err,
  128. re_match_context_t *mctx,
  129. re_dfastate_t *state) internal_function;
  130. static re_dfastate_t *merge_state_with_log (reg_errcode_t *err,
  131. re_match_context_t *mctx,
  132. re_dfastate_t *next_state) internal_function;
  133. static reg_errcode_t check_subexp_matching_top (re_match_context_t *mctx,
  134. re_node_set *cur_nodes,
  135. int str_idx) internal_function;
  136. #if 0
  137. static re_dfastate_t *transit_state_sb (reg_errcode_t *err,
  138. re_match_context_t *mctx,
  139. re_dfastate_t *pstate) internal_function;
  140. #endif
  141. #ifdef RE_ENABLE_I18N
  142. static reg_errcode_t transit_state_mb (re_match_context_t *mctx,
  143. re_dfastate_t *pstate) internal_function;
  144. #endif /* RE_ENABLE_I18N */
  145. static reg_errcode_t transit_state_bkref (re_match_context_t *mctx,
  146. const re_node_set *nodes) internal_function;
  147. static reg_errcode_t get_subexp (re_match_context_t *mctx,
  148. int bkref_node, int bkref_str_idx) internal_function;
  149. static reg_errcode_t get_subexp_sub (re_match_context_t *mctx,
  150. const re_sub_match_top_t *sub_top,
  151. re_sub_match_last_t *sub_last,
  152. int bkref_node, int bkref_str) internal_function;
  153. static int find_subexp_node (const re_dfa_t *dfa, const re_node_set *nodes,
  154. int subexp_idx, int type) internal_function;
  155. static reg_errcode_t check_arrival (re_match_context_t *mctx,
  156. state_array_t *path, int top_node,
  157. int top_str, int last_node, int last_str,
  158. int type) internal_function;
  159. static reg_errcode_t check_arrival_add_next_nodes (re_match_context_t *mctx,
  160. int str_idx,
  161. re_node_set *cur_nodes,
  162. re_node_set *next_nodes) internal_function;
  163. static reg_errcode_t check_arrival_expand_ecl (re_dfa_t *dfa,
  164. re_node_set *cur_nodes,
  165. int ex_subexp, int type) internal_function;
  166. static reg_errcode_t check_arrival_expand_ecl_sub (re_dfa_t *dfa,
  167. re_node_set *dst_nodes,
  168. int target, int ex_subexp,
  169. int type) internal_function;
  170. static reg_errcode_t expand_bkref_cache (re_match_context_t *mctx,
  171. re_node_set *cur_nodes, int cur_str,
  172. int subexp_num, int type) internal_function;
  173. static int build_trtable (re_dfa_t *dfa,
  174. re_dfastate_t *state) internal_function;
  175. #ifdef RE_ENABLE_I18N
  176. static int check_node_accept_bytes (re_dfa_t *dfa, int node_idx,
  177. const re_string_t *input, int idx) internal_function;
  178. # ifdef _LIBC
  179. static unsigned int find_collation_sequence_value (const unsigned char *mbs,
  180. size_t name_len) internal_function;
  181. # endif /* _LIBC */
  182. #endif /* RE_ENABLE_I18N */
  183. static int group_nodes_into_DFAstates (re_dfa_t *dfa,
  184. const re_dfastate_t *state,
  185. re_node_set *states_node,
  186. bitset *states_ch) internal_function;
  187. static int check_node_accept (const re_match_context_t *mctx,
  188. const re_token_t *node, int idx) internal_function;
  189. static reg_errcode_t extend_buffers (re_match_context_t *mctx) internal_function;
  190. /* Entry point for POSIX code. */
  191. /* regexec searches for a given pattern, specified by PREG, in the
  192. string STRING.
  193. If NMATCH is zero or REG_NOSUB was set in the cflags argument to
  194. `regcomp', we ignore PMATCH. Otherwise, we assume PMATCH has at
  195. least NMATCH elements, and we set them to the offsets of the
  196. corresponding matched substrings.
  197. EFLAGS specifies `execution flags' which affect matching: if
  198. REG_NOTBOL is set, then ^ does not match at the beginning of the
  199. string; if REG_NOTEOL is set, then $ does not match at the end.
  200. We return 0 if we find a match and REG_NOMATCH if not. */
  201. int
  202. regexec (preg, string, nmatch, pmatch, eflags)
  203. const regex_t *__restrict preg;
  204. const char *__restrict string;
  205. size_t nmatch;
  206. regmatch_t pmatch[];
  207. int eflags;
  208. {
  209. reg_errcode_t err;
  210. int start, length;
  211. if (eflags & ~(REG_NOTBOL | REG_NOTEOL | REG_STARTEND))
  212. return REG_BADPAT;
  213. if (eflags & REG_STARTEND)
  214. {
  215. start = pmatch[0].rm_so;
  216. length = pmatch[0].rm_eo;
  217. }
  218. else
  219. {
  220. start = 0;
  221. length = strlen (string);
  222. }
  223. if (preg->no_sub)
  224. err = re_search_internal (preg, string, length, start, length - start,
  225. length, 0, NULL, eflags);
  226. else
  227. err = re_search_internal (preg, string, length, start, length - start,
  228. length, nmatch, pmatch, eflags);
  229. return err != REG_NOERROR;
  230. }
#ifdef _LIBC
# include <shlib-compat.h>
/* In glibc, the real symbol is __regexec; export it as regexec at the
   current symbol version GLIBC_2.3.4.  */
versioned_symbol (libc, __regexec, regexec, GLIBC_2_3_4);
# if SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)
__typeof__ (__regexec) __compat_regexec;

/* Compatibility wrapper for binaries linked against glibc < 2.3.4:
   the old regexec did not understand REG_STARTEND, so mask it (and any
   other unknown flags) out of EFLAGS before delegating.  */
int
attribute_compat_text_section
__compat_regexec (const regex_t *__restrict preg,
		  const char *__restrict string, size_t nmatch,
		  regmatch_t pmatch[], int eflags)
{
  return regexec (preg, string, nmatch, pmatch,
		  eflags & (REG_NOTBOL | REG_NOTEOL));
}
compat_symbol (libc, __compat_regexec, regexec, GLIBC_2_0);
# endif
#endif
/* Entry points for GNU code.  */

/* re_match, re_search, re_match_2, re_search_2

   The former two functions operate on STRING with length LENGTH,
   while the latter two operate on the concatenation of STRING1 and STRING2
   with lengths LENGTH1 and LENGTH2, respectively.

   re_match() matches the compiled pattern in BUFP against the string,
   starting at index START.

   re_search() first tries matching at index START, then it tries to match
   starting from index START + 1, and so on.  The last start position tried
   is START + RANGE.  (Thus RANGE = 0 forces re_search to operate the same
   way as re_match().)

   The parameter STOP of re_{match,search}_2 specifies that no match exceeding
   the first STOP characters of the concatenation of the strings should be
   considered.

   If REGS is not NULL, and BUFP->no_sub is not set, the offsets of the match
   and all groups are stored in REGS.  (For the "_2" variants, the offsets are
   computed relative to the concatenation, not relative to the individual
   strings.)

   On success, re_match* functions return the length of the match, re_search*
   return the position of the start of the match.  Return value -1 means no
   match was found and -2 indicates an internal error.  */
  269. int
  270. re_match (bufp, string, length, start, regs)
  271. struct re_pattern_buffer *bufp;
  272. const char *string;
  273. int length, start;
  274. struct re_registers *regs;
  275. {
  276. return re_search_stub (bufp, string, length, start, 0, length, regs, 1);
  277. }
  278. #ifdef _LIBC
  279. weak_alias (__re_match, re_match)
  280. #endif
  281. int
  282. re_search (bufp, string, length, start, range, regs)
  283. struct re_pattern_buffer *bufp;
  284. const char *string;
  285. int length, start, range;
  286. struct re_registers *regs;
  287. {
  288. return re_search_stub (bufp, string, length, start, range, length, regs, 0);
  289. }
  290. #ifdef _LIBC
  291. weak_alias (__re_search, re_search)
  292. #endif
  293. int
  294. re_match_2 (bufp, string1, length1, string2, length2, start, regs, stop)
  295. struct re_pattern_buffer *bufp;
  296. const char *string1, *string2;
  297. int length1, length2, start, stop;
  298. struct re_registers *regs;
  299. {
  300. return re_search_2_stub (bufp, string1, length1, string2, length2,
  301. start, 0, regs, stop, 1);
  302. }
  303. #ifdef _LIBC
  304. weak_alias (__re_match_2, re_match_2)
  305. #endif
  306. int
  307. re_search_2 (bufp, string1, length1, string2, length2, start, range, regs, stop)
  308. struct re_pattern_buffer *bufp;
  309. const char *string1, *string2;
  310. int length1, length2, start, range, stop;
  311. struct re_registers *regs;
  312. {
  313. return re_search_2_stub (bufp, string1, length1, string2, length2,
  314. start, range, regs, stop, 0);
  315. }
  316. #ifdef _LIBC
  317. weak_alias (__re_search_2, re_search_2)
  318. #endif
  319. static int
  320. re_search_2_stub (bufp, string1, length1, string2, length2, start, range, regs,
  321. stop, ret_len)
  322. struct re_pattern_buffer *bufp;
  323. const char *string1, *string2;
  324. int length1, length2, start, range, stop, ret_len;
  325. struct re_registers *regs;
  326. {
  327. const char *str;
  328. int rval;
  329. int len = length1 + length2;
  330. int free_str = 0;
  331. if (BE (length1 < 0 || length2 < 0 || stop < 0, 0))
  332. return -2;
  333. /* Concatenate the strings. */
  334. if (length2 > 0)
  335. if (length1 > 0)
  336. {
  337. char *s = re_malloc (char, len);
  338. if (BE (s == NULL, 0))
  339. return -2;
  340. memcpy (s, string1, length1);
  341. memcpy (s + length1, string2, length2);
  342. str = s;
  343. free_str = 1;
  344. }
  345. else
  346. str = string2;
  347. else
  348. str = string1;
  349. rval = re_search_stub (bufp, str, len, start, range, stop, regs,
  350. ret_len);
  351. if (free_str)
  352. re_free ((char *) str);
  353. return rval;
  354. }
  355. /* The parameters have the same meaning as those of re_search.
  356. Additional parameters:
  357. If RET_LEN is nonzero the length of the match is returned (re_match style);
  358. otherwise the position of the match is returned. */
  359. static int
  360. re_search_stub (bufp, string, length, start, range, stop, regs, ret_len)
  361. struct re_pattern_buffer *bufp;
  362. const char *string;
  363. int length, start, range, stop, ret_len;
  364. struct re_registers *regs;
  365. {
  366. reg_errcode_t result;
  367. regmatch_t *pmatch;
  368. int nregs, rval;
  369. int eflags = 0;
  370. /* Check for out-of-range. */
  371. if (BE (start < 0 || start > length, 0))
  372. return -1;
  373. if (BE (start + range > length, 0))
  374. range = length - start;
  375. else if (BE (start + range < 0, 0))
  376. range = -start;
  377. eflags |= (bufp->not_bol) ? REG_NOTBOL : 0;
  378. eflags |= (bufp->not_eol) ? REG_NOTEOL : 0;
  379. /* Compile fastmap if we haven't yet. */
  380. if (range > 0 && bufp->fastmap != NULL && !bufp->fastmap_accurate)
  381. re_compile_fastmap (bufp);
  382. if (BE (bufp->no_sub, 0))
  383. regs = NULL;
  384. /* We need at least 1 register. */
  385. if (regs == NULL)
  386. nregs = 1;
  387. else if (BE (bufp->regs_allocated == REGS_FIXED &&
  388. regs->num_regs < bufp->re_nsub + 1, 0))
  389. {
  390. nregs = regs->num_regs;
  391. if (BE (nregs < 1, 0))
  392. {
  393. /* Nothing can be copied to regs. */
  394. regs = NULL;
  395. nregs = 1;
  396. }
  397. }
  398. else
  399. nregs = bufp->re_nsub + 1;
  400. pmatch = re_malloc (regmatch_t, nregs);
  401. if (BE (pmatch == NULL, 0))
  402. return -2;
  403. result = re_search_internal (bufp, string, length, start, range, stop,
  404. nregs, pmatch, eflags);
  405. rval = 0;
  406. /* I hope we needn't fill ther regs with -1's when no match was found. */
  407. if (result != REG_NOERROR)
  408. rval = -1;
  409. else if (regs != NULL)
  410. {
  411. /* If caller wants register contents data back, copy them. */
  412. bufp->regs_allocated = re_copy_regs (regs, pmatch, nregs,
  413. bufp->regs_allocated);
  414. if (BE (bufp->regs_allocated == REGS_UNALLOCATED, 0))
  415. rval = -2;
  416. }
  417. if (BE (rval == 0, 1))
  418. {
  419. if (ret_len)
  420. {
  421. assert (pmatch[0].rm_so == start);
  422. rval = pmatch[0].rm_eo - start;
  423. }
  424. else
  425. rval = pmatch[0].rm_so;
  426. }
  427. re_free (pmatch);
  428. return rval;
  429. }
  430. static unsigned
  431. re_copy_regs (regs, pmatch, nregs, regs_allocated)
  432. struct re_registers *regs;
  433. regmatch_t *pmatch;
  434. int nregs, regs_allocated;
  435. {
  436. int rval = REGS_REALLOCATE;
  437. int i;
  438. int need_regs = nregs + 1;
  439. /* We need one extra element beyond `num_regs' for the `-1' marker GNU code
  440. uses. */
  441. /* Have the register data arrays been allocated? */
  442. if (regs_allocated == REGS_UNALLOCATED)
  443. { /* No. So allocate them with malloc. */
  444. regs->start = re_malloc (regoff_t, need_regs);
  445. regs->end = re_malloc (regoff_t, need_regs);
  446. if (BE (regs->start == NULL, 0) || BE (regs->end == NULL, 0))
  447. return REGS_UNALLOCATED;
  448. regs->num_regs = need_regs;
  449. }
  450. else if (regs_allocated == REGS_REALLOCATE)
  451. { /* Yes. If we need more elements than were already
  452. allocated, reallocate them. If we need fewer, just
  453. leave it alone. */
  454. if (BE (need_regs > regs->num_regs, 0))
  455. {
  456. regoff_t *new_start = re_realloc (regs->start, regoff_t, need_regs);
  457. regoff_t *new_end = re_realloc (regs->end, regoff_t, need_regs);
  458. if (BE (new_start == NULL, 0) || BE (new_end == NULL, 0))
  459. return REGS_UNALLOCATED;
  460. regs->start = new_start;
  461. regs->end = new_end;
  462. regs->num_regs = need_regs;
  463. }
  464. }
  465. else
  466. {
  467. assert (regs_allocated == REGS_FIXED);
  468. /* This function may not be called with REGS_FIXED and nregs too big. */
  469. assert (regs->num_regs >= nregs);
  470. rval = REGS_FIXED;
  471. }
  472. /* Copy the regs. */
  473. for (i = 0; i < nregs; ++i)
  474. {
  475. regs->start[i] = pmatch[i].rm_so;
  476. regs->end[i] = pmatch[i].rm_eo;
  477. }
  478. for ( ; i < regs->num_regs; ++i)
  479. regs->start[i] = regs->end[i] = -1;
  480. return rval;
  481. }
  482. /* Set REGS to hold NUM_REGS registers, storing them in STARTS and
  483. ENDS. Subsequent matches using PATTERN_BUFFER and REGS will use
  484. this memory for recording register information. STARTS and ENDS
  485. must be allocated using the malloc library routine, and must each
  486. be at least NUM_REGS * sizeof (regoff_t) bytes long.
  487. If NUM_REGS == 0, then subsequent matches should allocate their own
  488. register data.
  489. Unless this function is called, the first search or match using
  490. PATTERN_BUFFER will allocate its own register data, without
  491. freeing the old data. */
  492. void
  493. re_set_registers (bufp, regs, num_regs, starts, ends)
  494. struct re_pattern_buffer *bufp;
  495. struct re_registers *regs;
  496. unsigned num_regs;
  497. regoff_t *starts, *ends;
  498. {
  499. if (num_regs)
  500. {
  501. bufp->regs_allocated = REGS_REALLOCATE;
  502. regs->num_regs = num_regs;
  503. regs->start = starts;
  504. regs->end = ends;
  505. }
  506. else
  507. {
  508. bufp->regs_allocated = REGS_UNALLOCATED;
  509. regs->num_regs = 0;
  510. regs->start = regs->end = (regoff_t *) 0;
  511. }
  512. }
  513. #ifdef _LIBC
  514. weak_alias (__re_set_registers, re_set_registers)
  515. #endif
  516. /* Entry points compatible with 4.2 BSD regex library. We don't define
  517. them unless specifically requested. */
  518. #if defined _REGEX_RE_COMP || defined _LIBC
  519. int
  520. # ifdef _LIBC
  521. weak_function
  522. # endif
  523. re_exec (s)
  524. const char *s;
  525. {
  526. return 0 == regexec (&re_comp_buf, s, 0, NULL, 0);
  527. }
  528. #endif /* _REGEX_RE_COMP */
/* Internal entry point.  */

/* Searches for a compiled pattern PREG in the string STRING, whose
   length is LENGTH.  NMATCH, PMATCH, and EFLAGS have the same
   meanings as with regexec.  START and RANGE have the same meanings
   as with re_search.
   Return REG_NOERROR if we find a match, and REG_NOMATCH if not,
   otherwise return the error code.
   Note: We assume front end functions already check ranges.
   (START + RANGE >= 0 && START + RANGE <= LENGTH)  */
static reg_errcode_t
re_search_internal (preg, string, length, start, range, stop, nmatch, pmatch,
		    eflags)
     const regex_t *preg;
     const char *string;
     int length, start, range, stop, eflags;
     size_t nmatch;
     regmatch_t pmatch[];
{
  reg_errcode_t err;
  re_dfa_t *dfa = (re_dfa_t *)preg->buffer;
  int left_lim, right_lim, incr;
  int fl_longest_match, match_first, match_kind, match_last = -1;
  int extra_nmatch;
  int sb, ch;
#if defined _LIBC || (defined __STDC_VERSION__ && __STDC_VERSION__ >= 199901L)
  re_match_context_t mctx = { .dfa = dfa };
#else
  re_match_context_t mctx;
#endif
  /* Only use the fastmap when it is trustworthy: compiled, up to date,
     a nonzero search RANGE, and the pattern cannot match the empty
     string (can_be_null).  */
  char *fastmap = (preg->fastmap != NULL && preg->fastmap_accurate
		   && range && !preg->can_be_null) ? preg->fastmap : NULL;
  unsigned RE_TRANSLATE_TYPE t = (unsigned RE_TRANSLATE_TYPE) preg->translate;
#if !(defined _LIBC || (defined __STDC_VERSION__ && __STDC_VERSION__ >= 199901L))
  /* Pre-C99 fallback: no designated initializers, so zero by hand.  */
  memset (&mctx, '\0', sizeof (re_match_context_t));
  mctx.dfa = dfa;
#endif
  /* Registers beyond re_nsub + 1 can never be set; remember how many
     trailing pmatch entries just need the -1 marker.  */
  extra_nmatch = (nmatch > preg->re_nsub) ? nmatch - (preg->re_nsub + 1) : 0;
  nmatch -= extra_nmatch;
  /* Check if the DFA haven't been compiled.  */
  if (BE (preg->used == 0 || dfa->init_state == NULL
	  || dfa->init_state_word == NULL || dfa->init_state_nl == NULL
	  || dfa->init_state_begbuf == NULL, 0))
    return REG_NOMATCH;
#ifdef DEBUG
  /* We assume front-end functions already check them.  */
  assert (start + range >= 0 && start + range <= length);
#endif
  /* If initial states with non-begbuf contexts have no elements,
     the regex must be anchored.  If preg->newline_anchor is set,
     we'll never use init_state_nl, so do not check it.  */
  if (dfa->init_state->nodes.nelem == 0
      && dfa->init_state_word->nodes.nelem == 0
      && (dfa->init_state_nl->nodes.nelem == 0
	  || !preg->newline_anchor))
    {
      /* Anchored pattern: only position 0 can match.  */
      if (start != 0 && start + range != 0)
	return REG_NOMATCH;
      start = range = 0;
    }
  /* We must check the longest matching, if nmatch > 0.  */
  fl_longest_match = (nmatch != 0 || dfa->nbackref);
  err = re_string_allocate (&mctx.input, string, length, dfa->nodes_len + 1,
			    preg->translate, preg->syntax & RE_ICASE, dfa);
  if (BE (err != REG_NOERROR, 0))
    goto free_return;
  mctx.input.stop = stop;
  mctx.input.raw_stop = stop;
  mctx.input.newline_anchor = preg->newline_anchor;
  err = match_ctx_init (&mctx, eflags, dfa->nbackref * 2);
  if (BE (err != REG_NOERROR, 0))
    goto free_return;
  /* We will log all the DFA states through which the dfa pass,
     if nmatch > 1, or this dfa has "multibyte node", which is a
     back-reference or a node which can accept multibyte character or
     multi character collating element.  */
  if (nmatch > 1 || dfa->has_mb_node)
    {
      mctx.state_log = re_malloc (re_dfastate_t *, mctx.input.bufs_len + 1);
      if (BE (mctx.state_log == NULL, 0))
	{
	  err = REG_ESPACE;
	  goto free_return;
	}
    }
  else
    mctx.state_log = NULL;
  match_first = start;
  mctx.input.tip_context = (eflags & REG_NOTBOL) ? CONTEXT_BEGBUF
			   : CONTEXT_NEWLINE | CONTEXT_BEGBUF;
  /* Check incrementally whether or not the input string matches.  */
  incr = (range < 0) ? -1 : 1;
  left_lim = (range < 0) ? start + range : start;
  right_lim = (range < 0) ? start : start + range;
  sb = dfa->mb_cur_max == 1;
  /* Encode the scan strategy as a small integer for the switch below:
     bit 2 = single-byte-safe fastmap, bit 1 = forward scan,
     bit 0 = translation table present; 8 = no fastmap at all.  */
  match_kind =
    (fastmap
     ? ((sb || !(preg->syntax & RE_ICASE || t) ? 4 : 0)
	| (range >= 0 ? 2 : 0)
	| (t != NULL ? 1 : 0))
     : 8);
  for (;; match_first += incr)
    {
      err = REG_NOMATCH;
      if (match_first < left_lim || right_lim < match_first)
	goto free_return;
      /* Advance as rapidly as possible through the string, until we
	 find a plausible place to start matching.  This may be done
	 with varying efficiency, so there are various possibilities:
	 only the most common of them are specialized, in order to
	 save on code size.  We use a switch statement for speed.  */
      switch (match_kind)
	{
	case 8:
	  /* No fastmap.  */
	  break;
	case 7:
	  /* Fastmap with single-byte translation, match forward.  */
	  while (BE (match_first < right_lim, 1)
		 && !fastmap[t[(unsigned char) string[match_first]]])
	    ++match_first;
	  goto forward_match_found_start_or_reached_end;
	case 6:
	  /* Fastmap without translation, match forward.  */
	  while (BE (match_first < right_lim, 1)
		 && !fastmap[(unsigned char) string[match_first]])
	    ++match_first;
	forward_match_found_start_or_reached_end:
	  if (BE (match_first == right_lim, 0))
	    {
	      /* Past the end of the string, the "character" is NUL.  */
	      ch = match_first >= length
		   ? 0 : (unsigned char) string[match_first];
	      if (!fastmap[t ? t[ch] : ch])
		goto free_return;
	    }
	  break;
	case 4:
	case 5:
	  /* Fastmap without multi-byte translation, match backwards.  */
	  while (match_first >= left_lim)
	    {
	      ch = match_first >= length
		   ? 0 : (unsigned char) string[match_first];
	      if (fastmap[t ? t[ch] : ch])
		break;
	      --match_first;
	    }
	  if (match_first < left_lim)
	    goto free_return;
	  break;
	default:
	  /* In this case, we can't determine easily the current byte,
	     since it might be a component byte of a multibyte
	     character.  Then we use the constructed buffer instead.  */
	  for (;;)
	    {
	      /* If MATCH_FIRST is out of the valid range, reconstruct the
		 buffers.  */
	      unsigned int offset = match_first - mctx.input.raw_mbs_idx;
	      if (BE (offset >= (unsigned int) mctx.input.valid_raw_len, 0))
		{
		  err = re_string_reconstruct (&mctx.input, match_first,
					       eflags);
		  if (BE (err != REG_NOERROR, 0))
		    goto free_return;
		  offset = match_first - mctx.input.raw_mbs_idx;
		}
	      /* If MATCH_FIRST is out of the buffer, leave it as '\0'.
		 Note that MATCH_FIRST must not be smaller than 0.  */
	      ch = (match_first >= length
		    ? 0 : re_string_byte_at (&mctx.input, offset));
	      if (fastmap[ch])
		break;
	      match_first += incr;
	      if (match_first < left_lim || match_first > right_lim)
		{
		  err = REG_NOMATCH;
		  goto free_return;
		}
	    }
	  break;
	}
      /* Reconstruct the buffers so that the matcher can assume that
	 the matching starts from the beginning of the buffer.  */
      err = re_string_reconstruct (&mctx.input, match_first, eflags);
      if (BE (err != REG_NOERROR, 0))
	goto free_return;
#ifdef RE_ENABLE_I18N
      /* Don't consider this char as a possible match start if it is part,
	 yet isn't the head, of a multibyte character.  */
      if (!sb && !re_string_first_byte (&mctx.input, 0))
	continue;
#endif
      /* It seems to be appropriate one, then use the matcher.  */
      /* We assume that the matching starts from 0.  */
      mctx.state_log_top = mctx.nbkref_ents = mctx.max_mb_elem_len = 0;
      match_last = check_matching (&mctx, fl_longest_match,
				   range >= 0 ? &match_first : NULL);
      if (match_last != -1)
	{
	  if (BE (match_last == -2, 0))
	    {
	      /* check_matching reports -2 on allocation failure.  */
	      err = REG_ESPACE;
	      goto free_return;
	    }
	  else
	    {
	      mctx.match_last = match_last;
	      if ((!preg->no_sub && nmatch > 1) || dfa->nbackref)
		{
		  re_dfastate_t *pstate = mctx.state_log[match_last];
		  mctx.last_node = check_halt_state_context (&mctx, pstate,
							     match_last);
		}
	      if ((!preg->no_sub && nmatch > 1 && dfa->has_plural_match)
		  || dfa->nbackref)
		{
		  /* Back-references / plural matches may invalidate this
		     candidate; retry from the next position if pruning
		     reports REG_NOMATCH.  */
		  err = prune_impossible_nodes (&mctx);
		  if (err == REG_NOERROR)
		    break;
		  if (BE (err != REG_NOMATCH, 0))
		    goto free_return;
		  match_last = -1;
		}
	      else
		break; /* We found a match.  */
	    }
	}
      /* Reset per-position state before trying the next start.  */
      match_ctx_clean (&mctx);
    }
#ifdef DEBUG
  assert (match_last != -1);
  assert (err == REG_NOERROR);
#endif
  /* Set pmatch[] if we need.  */
  if (nmatch > 0)
    {
      int reg_idx;
      /* Initialize registers.  */
      for (reg_idx = 1; reg_idx < nmatch; ++reg_idx)
	pmatch[reg_idx].rm_so = pmatch[reg_idx].rm_eo = -1;
      /* Set the points where matching start/end.  */
      pmatch[0].rm_so = 0;
      pmatch[0].rm_eo = mctx.match_last;
      if (!preg->no_sub && nmatch > 1)
	{
	  err = set_regs (preg, &mctx, nmatch, pmatch,
			  dfa->has_plural_match && dfa->nbackref > 0);
	  if (BE (err != REG_NOERROR, 0))
	    goto free_return;
	}
      /* At last, add the offset to each register, since we slid
	 the buffers so that we could assume that the matching starts
	 from 0.  */
      for (reg_idx = 0; reg_idx < nmatch; ++reg_idx)
	if (pmatch[reg_idx].rm_so != -1)
	  {
#ifdef RE_ENABLE_I18N
	    /* Map offsets in the (possibly case-folded) internal buffer
	       back to raw-string offsets.  */
	    if (BE (mctx.input.offsets_needed != 0, 0))
	      {
		if (pmatch[reg_idx].rm_so == mctx.input.valid_len)
		  pmatch[reg_idx].rm_so += mctx.input.valid_raw_len - mctx.input.valid_len;
		else
		  pmatch[reg_idx].rm_so = mctx.input.offsets[pmatch[reg_idx].rm_so];
		if (pmatch[reg_idx].rm_eo == mctx.input.valid_len)
		  pmatch[reg_idx].rm_eo += mctx.input.valid_raw_len - mctx.input.valid_len;
		else
		  pmatch[reg_idx].rm_eo = mctx.input.offsets[pmatch[reg_idx].rm_eo];
	      }
#else
	    assert (mctx.input.offsets_needed == 0);
#endif
	    pmatch[reg_idx].rm_so += match_first;
	    pmatch[reg_idx].rm_eo += match_first;
	  }
      /* Registers the pattern can never set are reported unmatched.  */
      for (reg_idx = 0; reg_idx < extra_nmatch; ++reg_idx)
	{
	  pmatch[nmatch + reg_idx].rm_so = -1;
	  pmatch[nmatch + reg_idx].rm_eo = -1;
	}
      /* Duplicate subexpressions (from optimization) share results.  */
      if (dfa->subexp_map)
	for (reg_idx = 0; reg_idx + 1 < nmatch; reg_idx++)
	  if (dfa->subexp_map[reg_idx] != reg_idx)
	    {
	      pmatch[reg_idx + 1].rm_so
		= pmatch[dfa->subexp_map[reg_idx] + 1].rm_so;
	      pmatch[reg_idx + 1].rm_eo
		= pmatch[dfa->subexp_map[reg_idx] + 1].rm_eo;
	    }
    }
 free_return:
  re_free (mctx.state_log);
  if (dfa->nbackref)
    match_ctx_free (&mctx);
  re_string_destruct (&mctx.input);
  return err;
}
/* Remove nodes that cannot contribute to a successful match from the
   state log.  Used when the pattern has back references or plural
   matches: the forward pass may have recorded paths that cannot in
   fact reach the halt node.  Sift the state log backward; with back
   references, retry from earlier halt positions until the sifted log
   shows that string index 0 is reachable.  On success, replace
   MCTX->state_log with the sifted log and update MCTX->last_node and
   MCTX->match_last accordingly.
   Return REG_NOERROR, REG_NOMATCH if no match survives pruning, or
   REG_ESPACE on allocation failure.  */
static reg_errcode_t
prune_impossible_nodes (mctx)
     re_match_context_t *mctx;
{
  re_dfa_t *const dfa = mctx->dfa;
  int halt_node, match_last;
  reg_errcode_t ret;
  re_dfastate_t **sifted_states;
  re_dfastate_t **lim_states = NULL;
  re_sift_context_t sctx;
#ifdef DEBUG
  assert (mctx->state_log != NULL);
#endif
  match_last = mctx->match_last;
  halt_node = mctx->last_node;
  sifted_states = re_malloc (re_dfastate_t *, match_last + 1);
  if (BE (sifted_states == NULL, 0))
    {
      ret = REG_ESPACE;
      goto free_return;
    }
  if (dfa->nbackref)
    {
      lim_states = re_malloc (re_dfastate_t *, match_last + 1);
      if (BE (lim_states == NULL, 0))
	{
	  ret = REG_ESPACE;
	  goto free_return;
	}
      /* With back references, a sift for the current MATCH_LAST may
	 leave index 0 unreachable; shorten the match to the previous
	 halt state in the log and sift again.  */
      while (1)
	{
	  memset (lim_states, '\0',
		  sizeof (re_dfastate_t *) * (match_last + 1));
	  sift_ctx_init (&sctx, sifted_states, lim_states, halt_node,
			 match_last);
	  ret = sift_states_backward (mctx, &sctx);
	  re_node_set_free (&sctx.limits);
	  if (BE (ret != REG_NOERROR, 0))
	    goto free_return;
	  if (sifted_states[0] != NULL || lim_states[0] != NULL)
	    break;
	  /* Walk back to the previous halt state recorded in the log.  */
	  do
	    {
	      --match_last;
	      if (match_last < 0)
		{
		  ret = REG_NOMATCH;
		  goto free_return;
		}
	    } while (mctx->state_log[match_last] == NULL
		     || !mctx->state_log[match_last]->halt);
	  halt_node = check_halt_state_context (mctx,
						mctx->state_log[match_last],
						match_last);
	}
      ret = merge_state_array (dfa, sifted_states, lim_states,
			       match_last + 1);
      re_free (lim_states);
      lim_states = NULL;
      if (BE (ret != REG_NOERROR, 0))
	goto free_return;
    }
  else
    {
      /* No back references: a single backward sift suffices.  */
      sift_ctx_init (&sctx, sifted_states, lim_states, halt_node, match_last);
      ret = sift_states_backward (mctx, &sctx);
      re_node_set_free (&sctx.limits);
      if (BE (ret != REG_NOERROR, 0))
	goto free_return;
    }
  /* Install the sifted log; ownership of SIFTED_STATES passes to MCTX,
     so clear the local pointer before the common cleanup below.  */
  re_free (mctx->state_log);
  mctx->state_log = sifted_states;
  sifted_states = NULL;
  mctx->last_node = halt_node;
  mctx->match_last = match_last;
  ret = REG_NOERROR;
 free_return:
  re_free (sifted_states);
  re_free (lim_states);
  return ret;
}
  906. /* Acquire an initial state and return it.
  907. We must select appropriate initial state depending on the context,
  908. since initial states may have constraints like "\<", "^", etc.. */
  909. static inline re_dfastate_t *
  910. acquire_init_state_context (err, mctx, idx)
  911. reg_errcode_t *err;
  912. const re_match_context_t *mctx;
  913. int idx;
  914. {
  915. re_dfa_t *const dfa = mctx->dfa;
  916. if (dfa->init_state->has_constraint)
  917. {
  918. unsigned int context;
  919. context = re_string_context_at (&mctx->input, idx - 1, mctx->eflags);
  920. if (IS_WORD_CONTEXT (context))
  921. return dfa->init_state_word;
  922. else if (IS_ORDINARY_CONTEXT (context))
  923. return dfa->init_state;
  924. else if (IS_BEGBUF_CONTEXT (context) && IS_NEWLINE_CONTEXT (context))
  925. return dfa->init_state_begbuf;
  926. else if (IS_NEWLINE_CONTEXT (context))
  927. return dfa->init_state_nl;
  928. else if (IS_BEGBUF_CONTEXT (context))
  929. {
  930. /* It is relatively rare case, then calculate on demand. */
  931. return re_acquire_state_context (err, dfa,
  932. dfa->init_state->entrance_nodes,
  933. context);
  934. }
  935. else
  936. /* Must not happen? */
  937. return dfa->init_state;
  938. }
  939. else
  940. return dfa->init_state;
  941. }
  942. /* Check whether the regular expression match input string INPUT or not,
  943. and return the index where the matching end, return -1 if not match,
  944. or return -2 in case of an error.
  945. FL_LONGEST_MATCH means we want the POSIX longest matching.
  946. If P_MATCH_FIRST is not NULL, and the match fails, it is set to the
  947. next place where we may want to try matching.
  948. Note that the matcher assume that the maching starts from the current
  949. index of the buffer. */
  950. static int
  951. check_matching (mctx, fl_longest_match, p_match_first)
  952. re_match_context_t *mctx;
  953. int fl_longest_match;
  954. int *p_match_first;
  955. {
  956. re_dfa_t *const dfa = mctx->dfa;
  957. reg_errcode_t err;
  958. int match = 0;
  959. int match_last = -1;
  960. int cur_str_idx = re_string_cur_idx (&mctx->input);
  961. re_dfastate_t *cur_state;
  962. int at_init_state = p_match_first != NULL;
  963. int next_start_idx = cur_str_idx;
  964. err = REG_NOERROR;
  965. cur_state = acquire_init_state_context (&err, mctx, cur_str_idx);
  966. /* An initial state must not be NULL (invalid). */
  967. if (BE (cur_state == NULL, 0))
  968. {
  969. assert (err == REG_ESPACE);
  970. return -2;
  971. }
  972. if (mctx->state_log != NULL)
  973. {
  974. mctx->state_log[cur_str_idx] = cur_state;
  975. /* Check OP_OPEN_SUBEXP in the initial state in case that we use them
  976. later. E.g. Processing back references. */
  977. if (BE (dfa->nbackref, 0))
  978. {
  979. at_init_state = 0;
  980. err = check_subexp_matching_top (mctx, &cur_state->nodes, 0);
  981. if (BE (err != REG_NOERROR, 0))
  982. return err;
  983. if (cur_state->has_backref)
  984. {
  985. err = transit_state_bkref (mctx, &cur_state->nodes);
  986. if (BE (err != REG_NOERROR, 0))
  987. return err;
  988. }
  989. }
  990. }
  991. /* If the RE accepts NULL string. */
  992. if (BE (cur_state->halt, 0))
  993. {
  994. if (!cur_state->has_constraint
  995. || check_halt_state_context (mctx, cur_state, cur_str_idx))
  996. {
  997. if (!fl_longest_match)
  998. return cur_str_idx;
  999. else
  1000. {
  1001. match_last = cur_str_idx;
  1002. match = 1;
  1003. }
  1004. }
  1005. }
  1006. while (!re_string_eoi (&mctx->input))
  1007. {
  1008. re_dfastate_t *old_state = cur_state;
  1009. int next_char_idx = re_string_cur_idx (&mctx->input) + 1;
  1010. if (BE (next_char_idx >= mctx->input.bufs_len, 0)
  1011. || (BE (next_char_idx >= mctx->input.valid_len, 0)
  1012. && mctx->input.valid_len < mctx->input.len))
  1013. {
  1014. err = extend_buffers (mctx);
  1015. if (BE (err != REG_NOERROR, 0))
  1016. {
  1017. assert (err == REG_ESPACE);
  1018. return -2;
  1019. }
  1020. }
  1021. cur_state = transit_state (&err, mctx, cur_state);
  1022. if (mctx->state_log != NULL)
  1023. cur_state = merge_state_with_log (&err, mctx, cur_state);
  1024. if (cur_state == NULL)
  1025. {
  1026. /* Reached the invalid state or an error. Try to recover a valid
  1027. state using the state log, if available and if we have not
  1028. already found a valid (even if not the longest) match. */
  1029. if (BE (err != REG_NOERROR, 0))
  1030. return -2;
  1031. if (mctx->state_log == NULL
  1032. || (match && !fl_longest_match)
  1033. || (cur_state = find_recover_state (&err, mctx)) == NULL)
  1034. break;
  1035. }
  1036. if (BE (at_init_state, 0))
  1037. {
  1038. if (old_state == cur_state)
  1039. next_start_idx = next_char_idx;
  1040. else
  1041. at_init_state = 0;
  1042. }
  1043. if (cur_state->halt)
  1044. {
  1045. /* Reached a halt state.
  1046. Check the halt state can satisfy the current context. */
  1047. if (!cur_state->has_constraint
  1048. || check_halt_state_context (mctx, cur_state,
  1049. re_string_cur_idx (&mctx->input)))
  1050. {
  1051. /* We found an appropriate halt state. */
  1052. match_last = re_string_cur_idx (&mctx->input);
  1053. match = 1;
  1054. /* We found a match, do not modify match_first below. */
  1055. p_match_first = NULL;
  1056. if (!fl_longest_match)
  1057. break;
  1058. }
  1059. }
  1060. }
  1061. if (p_match_first)
  1062. *p_match_first += next_start_idx;
  1063. return match_last;
  1064. }
  1065. /* Check NODE match the current context. */
  1066. static int check_halt_node_context (dfa, node, context)
  1067. const re_dfa_t *dfa;
  1068. int node;
  1069. unsigned int context;
  1070. {
  1071. re_token_type_t type = dfa->nodes[node].type;
  1072. unsigned int constraint = dfa->nodes[node].constraint;
  1073. if (type != END_OF_RE)
  1074. return 0;
  1075. if (!constraint)
  1076. return 1;
  1077. if (NOT_SATISFY_NEXT_CONSTRAINT (constraint, context))
  1078. return 0;
  1079. return 1;
  1080. }
  1081. /* Check the halt state STATE match the current context.
  1082. Return 0 if not match, if the node, STATE has, is a halt node and
  1083. match the context, return the node. */
  1084. static int
  1085. check_halt_state_context (mctx, state, idx)
  1086. const re_match_context_t *mctx;
  1087. const re_dfastate_t *state;
  1088. int idx;
  1089. {
  1090. int i;
  1091. unsigned int context;
  1092. #ifdef DEBUG
  1093. assert (state->halt);
  1094. #endif
  1095. context = re_string_context_at (&mctx->input, idx, mctx->eflags);
  1096. for (i = 0; i < state->nodes.nelem; ++i)
  1097. if (check_halt_node_context (mctx->dfa, state->nodes.elems[i], context))
  1098. return state->nodes.elems[i];
  1099. return 0;
  1100. }
  1101. /* Compute the next node to which "NFA" transit from NODE("NFA" is a NFA
  1102. corresponding to the DFA).
  1103. Return the destination node, and update EPS_VIA_NODES, return -1 in case
  1104. of errors. */
  1105. static int
  1106. proceed_next_node (mctx, nregs, regs, pidx, node, eps_via_nodes, fs)
  1107. const re_match_context_t *mctx;
  1108. regmatch_t *regs;
  1109. int nregs, *pidx, node;
  1110. re_node_set *eps_via_nodes;
  1111. struct re_fail_stack_t *fs;
  1112. {
  1113. re_dfa_t *const dfa = mctx->dfa;
  1114. int i, err, dest_node;
  1115. dest_node = -1;
  1116. if (IS_EPSILON_NODE (dfa->nodes[node].type))
  1117. {
  1118. re_node_set *cur_nodes = &mctx->state_log[*pidx]->nodes;
  1119. re_node_set *edests = &dfa->edests[node];
  1120. int dest_node;
  1121. err = re_node_set_insert (eps_via_nodes, node);
  1122. if (BE (err < 0, 0))
  1123. return -2;
  1124. /* Pick up a valid destination, or return -1 if none is found. */
  1125. for (dest_node = -1, i = 0; i < edests->nelem; ++i)
  1126. {
  1127. int candidate = edests->elems[i];
  1128. if (!re_node_set_contains (cur_nodes, candidate))
  1129. continue;
  1130. if (dest_node == -1)
  1131. dest_node = candidate;
  1132. else
  1133. {
  1134. /* In order to avoid infinite loop like "(a*)*", return the second
  1135. epsilon-transition if the first was already considered. */
  1136. if (re_node_set_contains (eps_via_nodes, dest_node))
  1137. return candidate;
  1138. /* Otherwise, push the second epsilon-transition on the fail stack. */
  1139. else if (fs != NULL
  1140. && push_fail_stack (fs, *pidx, candidate, nregs, regs,
  1141. eps_via_nodes))
  1142. return -2;
  1143. /* We know we are going to exit. */
  1144. break;
  1145. }
  1146. }
  1147. return dest_node;
  1148. }
  1149. else
  1150. {
  1151. int naccepted = 0;
  1152. re_token_type_t type = dfa->nodes[node].type;
  1153. #ifdef RE_ENABLE_I18N
  1154. if (dfa->nodes[node].accept_mb)
  1155. naccepted = check_node_accept_bytes (dfa, node, &mctx->input, *pidx);
  1156. else
  1157. #endif /* RE_ENABLE_I18N */
  1158. if (type == OP_BACK_REF)
  1159. {
  1160. int subexp_idx = dfa->nodes[node].opr.idx + 1;
  1161. naccepted = regs[subexp_idx].rm_eo - regs[subexp_idx].rm_so;
  1162. if (fs != NULL)
  1163. {
  1164. if (regs[subexp_idx].rm_so == -1 || regs[subexp_idx].rm_eo == -1)
  1165. return -1;
  1166. else if (naccepted)
  1167. {
  1168. char *buf = (char *) re_string_get_buffer (&mctx->input);
  1169. if (memcmp (buf + regs[subexp_idx].rm_so, buf + *pidx,
  1170. naccepted) != 0)
  1171. return -1;
  1172. }
  1173. }
  1174. if (naccepted == 0)
  1175. {
  1176. err = re_node_set_insert (eps_via_nodes, node);
  1177. if (BE (err < 0, 0))
  1178. return -2;
  1179. dest_node = dfa->edests[node].elems[0];
  1180. if (re_node_set_contains (&mctx->state_log[*pidx]->nodes,
  1181. dest_node))
  1182. return dest_node;
  1183. }
  1184. }
  1185. if (naccepted != 0
  1186. || check_node_accept (mctx, dfa->nodes + node, *pidx))
  1187. {
  1188. dest_node = dfa->nexts[node];
  1189. *pidx = (naccepted == 0) ? *pidx + 1 : *pidx + naccepted;
  1190. if (fs && (*pidx > mctx->match_last || mctx->state_log[*pidx] == NULL
  1191. || !re_node_set_contains (&mctx->state_log[*pidx]->nodes,
  1192. dest_node)))
  1193. return -1;
  1194. re_node_set_empty (eps_via_nodes);
  1195. return dest_node;
  1196. }
  1197. }
  1198. return -1;
  1199. }
  1200. static reg_errcode_t
  1201. push_fail_stack (fs, str_idx, dest_node, nregs, regs, eps_via_nodes)
  1202. struct re_fail_stack_t *fs;
  1203. int str_idx, dest_node, nregs;
  1204. regmatch_t *regs;
  1205. re_node_set *eps_via_nodes;
  1206. {
  1207. reg_errcode_t err;
  1208. int num = fs->num++;
  1209. if (fs->num == fs->alloc)
  1210. {
  1211. struct re_fail_stack_ent_t *new_array;
  1212. new_array = realloc (fs->stack, (sizeof (struct re_fail_stack_ent_t)
  1213. * fs->alloc * 2));
  1214. if (new_array == NULL)
  1215. return REG_ESPACE;
  1216. fs->alloc *= 2;
  1217. fs->stack = new_array;
  1218. }
  1219. fs->stack[num].idx = str_idx;
  1220. fs->stack[num].node = dest_node;
  1221. fs->stack[num].regs = re_malloc (regmatch_t, nregs);
  1222. if (fs->stack[num].regs == NULL)
  1223. return REG_ESPACE;
  1224. memcpy (fs->stack[num].regs, regs, sizeof (regmatch_t) * nregs);
  1225. err = re_node_set_init_copy (&fs->stack[num].eps_via_nodes, eps_via_nodes);
  1226. return err;
  1227. }
  1228. static int
  1229. pop_fail_stack (fs, pidx, nregs, regs, eps_via_nodes)
  1230. struct re_fail_stack_t *fs;
  1231. int *pidx, nregs;
  1232. regmatch_t *regs;
  1233. re_node_set *eps_via_nodes;
  1234. {
  1235. int num = --fs->num;
  1236. assert (num >= 0);
  1237. *pidx = fs->stack[num].idx;
  1238. memcpy (regs, fs->stack[num].regs, sizeof (regmatch_t) * nregs);
  1239. re_node_set_free (eps_via_nodes);
  1240. re_free (fs->stack[num].regs);
  1241. *eps_via_nodes = fs->stack[num].eps_via_nodes;
  1242. return fs->stack[num].node;
  1243. }
/* Set the positions where the subexpressions are starts/ends to registers
   PMATCH.
   Note: We assume that pmatch[0] is already set, and
   pmatch[i].rm_so == pmatch[i].rm_eo == -1 for 0 < i < nmatch.
   Walk the NFA from the match start to the match end, recording
   subexpression boundaries; when FL_BACKTRACK is set, alternative
   epsilon transitions are explored via a fail stack.  */
static reg_errcode_t
set_regs (preg, mctx, nmatch, pmatch, fl_backtrack)
     const regex_t *preg;
     const re_match_context_t *mctx;
     size_t nmatch;
     regmatch_t *pmatch;
     int fl_backtrack;
{
  re_dfa_t *dfa = (re_dfa_t *) preg->buffer;
  int idx, cur_node;
  re_node_set eps_via_nodes;
  struct re_fail_stack_t *fs;
  struct re_fail_stack_t fs_body = { 0, 2, NULL };
  regmatch_t *prev_idx_match;
#ifdef DEBUG
  assert (nmatch > 1);
  assert (mctx->state_log != NULL);
#endif
  if (fl_backtrack)
    {
      /* Backtracking requested (caller passes this for plural matches
	 with back references): use a fail stack to revisit alternative
	 epsilon transitions.  */
      fs = &fs_body;
      fs->stack = re_malloc (struct re_fail_stack_ent_t, fs->alloc);
      if (fs->stack == NULL)
	return REG_ESPACE;
    }
  else
    fs = NULL;
  cur_node = dfa->init_node;
  re_node_set_init_empty (&eps_via_nodes);
  /* Snapshot of the registers used by update_regs to undo empty
     optional-subexpression matches.
     NOTE(review): alloca size is nmatch * sizeof (regmatch_t) with no
     bound check — assumes nmatch is small; confirm against callers.  */
  prev_idx_match = (regmatch_t *) alloca (sizeof (regmatch_t) * nmatch);
  memcpy (prev_idx_match, pmatch, sizeof (regmatch_t) * nmatch);
  for (idx = pmatch[0].rm_so; idx <= pmatch[0].rm_eo ;)
    {
      update_regs (dfa, pmatch, prev_idx_match, cur_node, idx, nmatch);
      if (idx == pmatch[0].rm_eo && cur_node == mctx->last_node)
	{
	  int reg_idx;
	  if (fs)
	    {
	      /* Accept only if every opened subexpression was also
		 closed; otherwise backtrack.  */
	      for (reg_idx = 0; reg_idx < nmatch; ++reg_idx)
		if (pmatch[reg_idx].rm_so > -1 && pmatch[reg_idx].rm_eo == -1)
		  break;
	      if (reg_idx == nmatch)
		{
		  re_node_set_free (&eps_via_nodes);
		  return free_fail_stack_return (fs);
		}
	      cur_node = pop_fail_stack (fs, &idx, nmatch, pmatch,
					 &eps_via_nodes);
	    }
	  else
	    {
	      re_node_set_free (&eps_via_nodes);
	      return REG_NOERROR;
	    }
	}
      /* Proceed to next node.  */
      cur_node = proceed_next_node (mctx, nmatch, pmatch, &idx, cur_node,
				    &eps_via_nodes, fs);
      if (BE (cur_node < 0, 0))
	{
	  /* -2 signals an allocation failure inside proceed_next_node.  */
	  if (BE (cur_node == -2, 0))
	    {
	      re_node_set_free (&eps_via_nodes);
	      free_fail_stack_return (fs);
	      return REG_ESPACE;
	    }
	  /* Dead end (-1): backtrack if possible, else no match.  */
	  if (fs)
	    cur_node = pop_fail_stack (fs, &idx, nmatch, pmatch,
				       &eps_via_nodes);
	  else
	    {
	      re_node_set_free (&eps_via_nodes);
	      return REG_NOMATCH;
	    }
	}
    }
  re_node_set_free (&eps_via_nodes);
  return free_fail_stack_return (fs);
}
  1328. static reg_errcode_t
  1329. free_fail_stack_return (fs)
  1330. struct re_fail_stack_t *fs;
  1331. {
  1332. if (fs)
  1333. {
  1334. int fs_idx;
  1335. for (fs_idx = 0; fs_idx < fs->num; ++fs_idx)
  1336. {
  1337. re_node_set_free (&fs->stack[fs_idx].eps_via_nodes);
  1338. re_free (fs->stack[fs_idx].regs);
  1339. }
  1340. re_free (fs->stack);
  1341. }
  1342. return REG_NOERROR;
  1343. }
/* Update the subexpression registers PMATCH when passing through
   CUR_NODE at string index CUR_IDX.  OP_OPEN_SUBEXP records a start;
   OP_CLOSE_SUBEXP records an end.  PREV_IDX_MATCH holds a snapshot of
   the registers used to undo empty matches of optional subexpressions
   (e.g. "(a?)*"); any other node type leaves the registers untouched.  */
static void
update_regs (dfa, pmatch, prev_idx_match, cur_node, cur_idx, nmatch)
     re_dfa_t *dfa;
     regmatch_t *pmatch, *prev_idx_match;
     int cur_node, cur_idx, nmatch;
{
  int type = dfa->nodes[cur_node].type;
  if (type == OP_OPEN_SUBEXP)
    {
      /* Register 0 is the whole match, so subexpression N uses slot N+1.  */
      int reg_num = dfa->nodes[cur_node].opr.idx + 1;
      /* We are at the first node of this sub expression.  */
      if (reg_num < nmatch)
	{
	  pmatch[reg_num].rm_so = cur_idx;
	  pmatch[reg_num].rm_eo = -1;
	}
    }
  else if (type == OP_CLOSE_SUBEXP)
    {
      int reg_num = dfa->nodes[cur_node].opr.idx + 1;
      if (reg_num < nmatch)
	{
	  /* We are at the last node of this sub expression.  */
	  if (pmatch[reg_num].rm_so < cur_idx)
	    {
	      pmatch[reg_num].rm_eo = cur_idx;
	      /* This is a non-empty match or we are not inside an optional
		 subexpression.  Accept this right away.  */
	      memcpy (prev_idx_match, pmatch, sizeof (regmatch_t) * nmatch);
	    }
	  else
	    {
	      if (dfa->nodes[cur_node].opt_subexp
		  && prev_idx_match[reg_num].rm_so != -1)
		/* We transited through an empty match for an optional
		   subexpression, like (a?)*, and this is not the subexp's
		   first match.  Copy back the old content of the registers
		   so that matches of an inner subexpression are undone as
		   well, like in ((a?))*.  */
		memcpy (pmatch, prev_idx_match, sizeof (regmatch_t) * nmatch);
	      else
		/* We completed a subexpression, but it may be part of
		   an optional one, so do not update PREV_IDX_MATCH.  */
		pmatch[reg_num].rm_eo = cur_idx;
	    }
	}
    }
}
/* This function checks the STATE_LOG from the SCTX->last_str_idx to 0
   and sift the nodes in each states according to the following rules.
   Updated state_log will be wrote to STATE_LOG.
   Rules: We throw away the Node `a' in the STATE_LOG[STR_IDX] if...
   1. When STR_IDX == MATCH_LAST(the last index in the state_log):
      If `a' isn't the LAST_NODE and `a' can't epsilon transit to
      the LAST_NODE, we throw away the node `a'.
   2. When 0 <= STR_IDX < MATCH_LAST and `a' accepts
      string `s' and transit to `b':
      i. If 'b' isn't in the STATE_LOG[STR_IDX+strlen('s')], we throw
	 away the node `a'.
      ii. If 'b' is in the STATE_LOG[STR_IDX+strlen('s')] but 'b' is
	  thrown away, we throw away the node `a'.
   3. When 0 <= STR_IDX < MATCH_LAST and 'a' epsilon transit to 'b':
      i. If 'b' isn't in the STATE_LOG[STR_IDX], we throw away the
	 node `a'.
      ii. If 'b' is in the STATE_LOG[STR_IDX] but 'b' is thrown away,
	  we throw away the node `a'.  */
#define STATE_NODE_CONTAINS(state,node) \
  ((state) != NULL && re_node_set_contains (&(state)->nodes, node))
static reg_errcode_t
sift_states_backward (mctx, sctx)
     re_match_context_t *mctx;
     re_sift_context_t *sctx;
{
  reg_errcode_t err;
  int null_cnt = 0;
  int str_idx = sctx->last_str_idx;
  re_node_set cur_dest;
#ifdef DEBUG
  assert (mctx->state_log != NULL && mctx->state_log[str_idx] != NULL);
#endif
  /* Build sifted state_log[str_idx].  It has the nodes which can epsilon
     transit to the last_node and the last_node itself.  */
  err = re_node_set_init_1 (&cur_dest, sctx->last_node);
  if (BE (err != REG_NOERROR, 0))
    return err;
  err = update_cur_sifted_state (mctx, sctx, str_idx, &cur_dest);
  if (BE (err != REG_NOERROR, 0))
    goto free_return;
  /* Then check each states in the state_log.  */
  while (str_idx > 0)
    {
      /* Update counters.  Once more consecutive slots are NULL than the
	 longest multibyte element, nothing earlier can transit into the
	 sifted log, so the rest can be zeroed wholesale.  */
      null_cnt = (sctx->sifted_states[str_idx] == NULL) ? null_cnt + 1 : 0;
      if (null_cnt > mctx->max_mb_elem_len)
	{
	  memset (sctx->sifted_states, '\0',
		  sizeof (re_dfastate_t *) * str_idx);
	  re_node_set_free (&cur_dest);
	  return REG_NOERROR;
	}
      re_node_set_empty (&cur_dest);
      --str_idx;
      if (mctx->state_log[str_idx])
	{
	  err = build_sifted_states (mctx, sctx, str_idx, &cur_dest);
	  if (BE (err != REG_NOERROR, 0))
	    goto free_return;
	}
      /* Add all the nodes which satisfy the following conditions:
	 - It can epsilon transit to a node in CUR_DEST.
	 - It is in CUR_SRC.
	 And update state_log.  */
      err = update_cur_sifted_state (mctx, sctx, str_idx, &cur_dest);
      if (BE (err != REG_NOERROR, 0))
	goto free_return;
    }
  err = REG_NOERROR;
 free_return:
  re_node_set_free (&cur_dest);
  return err;
}
/* Build into CUR_DEST the sifted node set for STR_IDX: keep each
   non-epsilon node of the old state_log[STR_IDX] whose transition
   (single- or multi-byte) leads into the already-sifted state at the
   corresponding destination index, subject to the active back
   reference limits in SCTX->limits.  */
static reg_errcode_t
build_sifted_states (mctx, sctx, str_idx, cur_dest)
     re_match_context_t *mctx;
     re_sift_context_t *sctx;
     int str_idx;
     re_node_set *cur_dest;
{
  re_dfa_t *const dfa = mctx->dfa;
  re_node_set *cur_src = &mctx->state_log[str_idx]->non_eps_nodes;
  int i;
  /* Then build the next sifted state.
     We build the next sifted state on `cur_dest', and update
     `sifted_states[str_idx]' with `cur_dest'.
     Note:
     `cur_dest' is the sifted state from `state_log[str_idx + 1]'.
     `cur_src' points the node_set of the old `state_log[str_idx]'
     (with the epsilon nodes pre-filtered out).  */
  for (i = 0; i < cur_src->nelem; i++)
    {
      int prev_node = cur_src->elems[i];
      int naccepted = 0;
      int ret;
#ifdef DEBUG
      re_token_type_t type = dfa->nodes[prev_node].type;
      assert (!IS_EPSILON_NODE (type));
#endif
#ifdef RE_ENABLE_I18N
      /* If the node may accept `multi byte'.  */
      if (dfa->nodes[prev_node].accept_mb)
	naccepted = sift_states_iter_mb (mctx, sctx, prev_node,
					 str_idx, sctx->last_str_idx);
#endif /* RE_ENABLE_I18N */
      /* We don't check backreferences here.
	 See update_cur_sifted_state().  */
      if (!naccepted
	  && check_node_accept (mctx, dfa->nodes + prev_node, str_idx)
	  && STATE_NODE_CONTAINS (sctx->sifted_states[str_idx + 1],
				  dfa->nexts[prev_node]))
	naccepted = 1;
      /* NACCEPTED == 0 means the node accepts nothing here: drop it.  */
      if (naccepted == 0)
	continue;
      if (sctx->limits.nelem)
	{
	  /* Drop the node if the transition would cross a back
	     reference limitation boundary.  */
	  int to_idx = str_idx + naccepted;
	  if (check_dst_limits (mctx, &sctx->limits,
				dfa->nexts[prev_node], to_idx,
				prev_node, str_idx))
	    continue;
	}
      ret = re_node_set_insert (cur_dest, prev_node);
      if (BE (ret == -1, 0))
	return REG_ESPACE;
    }
  return REG_NOERROR;
}
  1520. /* Helper functions. */
  1521. static reg_errcode_t
  1522. clean_state_log_if_needed (mctx, next_state_log_idx)
  1523. re_match_context_t *mctx;
  1524. int next_state_log_idx;
  1525. {
  1526. int top = mctx->state_log_top;
  1527. if (next_state_log_idx >= mctx->input.bufs_len
  1528. || (next_state_log_idx >= mctx->input.valid_len
  1529. && mctx->input.valid_len < mctx->input.len))
  1530. {
  1531. reg_errcode_t err;
  1532. err = extend_buffers (mctx);
  1533. if (BE (err != REG_NOERROR, 0))
  1534. return err;
  1535. }
  1536. if (top < next_state_log_idx)
  1537. {
  1538. memset (mctx->state_log + top + 1, '\0',
  1539. sizeof (re_dfastate_t *) * (next_state_log_idx - top));
  1540. mctx->state_log_top = next_state_log_idx;
  1541. }
  1542. return REG_NOERROR;
  1543. }
  1544. static reg_errcode_t
  1545. merge_state_array (dfa, dst, src, num)
  1546. re_dfa_t *dfa;
  1547. re_dfastate_t **dst;
  1548. re_dfastate_t **src;
  1549. int num;
  1550. {
  1551. int st_idx;
  1552. reg_errcode_t err;
  1553. for (st_idx = 0; st_idx < num; ++st_idx)
  1554. {
  1555. if (dst[st_idx] == NULL)
  1556. dst[st_idx] = src[st_idx];
  1557. else if (src[st_idx] != NULL)
  1558. {
  1559. re_node_set merged_set;
  1560. err = re_node_set_init_union (&merged_set, &dst[st_idx]->nodes,
  1561. &src[st_idx]->nodes);
  1562. if (BE (err != REG_NOERROR, 0))
  1563. return err;
  1564. dst[st_idx] = re_acquire_state (&err, dfa, &merged_set);
  1565. re_node_set_free (&merged_set);
  1566. if (BE (err != REG_NOERROR, 0))
  1567. return err;
  1568. }
  1569. }
  1570. return REG_NOERROR;
  1571. }
/* Finish the sifted state for STR_IDX: extend DEST_NODES with the
   epsilon sources of its nodes (restricted to the original log's
   candidates), apply any subexpression limits, store the acquired
   state into SCTX->sifted_states[STR_IDX] (NULL if DEST_NODES is
   empty), and sift back references if the original state has any.  */
static reg_errcode_t
update_cur_sifted_state (mctx, sctx, str_idx, dest_nodes)
     re_match_context_t *mctx;
     re_sift_context_t *sctx;
     int str_idx;
     re_node_set *dest_nodes;
{
  re_dfa_t *const dfa = mctx->dfa;
  reg_errcode_t err;
  const re_node_set *candidates;
  /* CANDIDATES is the node set of the pre-sift state log, or NULL when
     no state was recorded at this index.  */
  candidates = ((mctx->state_log[str_idx] == NULL) ? NULL
		: &mctx->state_log[str_idx]->nodes);
  if (dest_nodes->nelem == 0)
    sctx->sifted_states[str_idx] = NULL;
  else
    {
      if (candidates)
	{
	  /* At first, add the nodes which can epsilon transit to a node in
	     DEST_NODE.  */
	  err = add_epsilon_src_nodes (dfa, dest_nodes, candidates);
	  if (BE (err != REG_NOERROR, 0))
	    return err;
	  /* Then, check the limitations in the current sift_context.  */
	  if (sctx->limits.nelem)
	    {
	      err = check_subexp_limits (dfa, dest_nodes, candidates, &sctx->limits,
					 mctx->bkref_ents, str_idx);
	      if (BE (err != REG_NOERROR, 0))
		return err;
	    }
	}
      sctx->sifted_states[str_idx] = re_acquire_state (&err, dfa, dest_nodes);
      if (BE (err != REG_NOERROR, 0))
	return err;
    }
  if (candidates && mctx->state_log[str_idx]->has_backref)
    {
      err = sift_states_bkref (mctx, sctx, str_idx, candidates);
      if (BE (err != REG_NOERROR, 0))
	return err;
    }
  return REG_NOERROR;
}
/* Add to DEST_NODES those CANDIDATES that can epsilon-transit into a
   node already in DEST_NODES.  The inverse epsilon closure of the
   state built from DEST_NODES is computed lazily and cached on the
   acquired state, so repeated sifts of the same state reuse it.  */
static reg_errcode_t
add_epsilon_src_nodes (dfa, dest_nodes, candidates)
     re_dfa_t *dfa;
     re_node_set *dest_nodes;
     const re_node_set *candidates;
{
  reg_errcode_t err = REG_NOERROR;
  int i;
  re_dfastate_t *state = re_acquire_state (&err, dfa, dest_nodes);
  if (BE (err != REG_NOERROR, 0))
    return err;
  /* First use of this state: build and cache its inverse eclosure as
     the union of the inveclosures of its member nodes.  */
  if (!state->inveclosure.alloc)
    {
      err = re_node_set_alloc (&state->inveclosure, dest_nodes->nelem);
      if (BE (err != REG_NOERROR, 0))
	return REG_ESPACE;
      for (i = 0; i < dest_nodes->nelem; i++)
	re_node_set_merge (&state->inveclosure,
			   dfa->inveclosures + dest_nodes->elems[i]);
    }
  /* DEST_NODES |= CANDIDATES & inveclosure(DEST_NODES).  */
  return re_node_set_add_intersect (dest_nodes, candidates,
				    &state->inveclosure);
}
/* Remove from DEST_NODES the nodes in the inverse epsilon closure of
   NODE, except those that (via a CANDIDATES node) can also reach some
   other member of DEST_NODES outside that closure.  Used by the sift
   code to retract nodes whose only purpose was reaching NODE.  */
static reg_errcode_t
sub_epsilon_src_nodes (dfa, node, dest_nodes, candidates)
     re_dfa_t *dfa;
     int node;
     re_node_set *dest_nodes;
     const re_node_set *candidates;
{
  int ecl_idx;
  reg_errcode_t err;
  re_node_set *inv_eclosure = dfa->inveclosures + node;
  re_node_set except_nodes;
  re_node_set_init_empty (&except_nodes);
  /* Pass 1: collect into EXCEPT_NODES the candidates that can reach a
     DEST_NODES member lying outside NODE's inverse eclosure — these
     must survive the removal below.  */
  for (ecl_idx = 0; ecl_idx < inv_eclosure->nelem; ++ecl_idx)
    {
      int cur_node = inv_eclosure->elems[ecl_idx];
      if (cur_node == node)
	continue;
      if (IS_EPSILON_NODE (dfa->nodes[cur_node].type))
	{
	  int edst1 = dfa->edests[cur_node].elems[0];
	  int edst2 = ((dfa->edests[cur_node].nelem > 1)
		       ? dfa->edests[cur_node].elems[1] : -1);
	  if ((!re_node_set_contains (inv_eclosure, edst1)
	       && re_node_set_contains (dest_nodes, edst1))
	      || (edst2 > 0
		  && !re_node_set_contains (inv_eclosure, edst2)
		  && re_node_set_contains (dest_nodes, edst2)))
	    {
	      err = re_node_set_add_intersect (&except_nodes, candidates,
					       dfa->inveclosures + cur_node);
	      if (BE (err != REG_NOERROR, 0))
		{
		  re_node_set_free (&except_nodes);
		  return err;
		}
	    }
	}
    }
  /* Pass 2: remove every inverse-eclosure node not excepted above.
     NOTE(review): when CUR_NODE is absent from DEST_NODES, the index
     computed below is -1 — presumably re_node_set_remove_at ignores
     negative indices; verify against its definition.  */
  for (ecl_idx = 0; ecl_idx < inv_eclosure->nelem; ++ecl_idx)
    {
      int cur_node = inv_eclosure->elems[ecl_idx];
      if (!re_node_set_contains (&except_nodes, cur_node))
	{
	  int idx = re_node_set_contains (dest_nodes, cur_node) - 1;
	  re_node_set_remove_at (dest_nodes, idx);
	}
    }
  re_node_set_free (&except_nodes);
  return REG_NOERROR;
}
/* Check whether moving from SRC_NODE at string position SRC_IDX to
   DST_NODE at DST_IDX would cross a boundary of any subexpression
   limited by LIMITS.  Return 1 if some limitation separates the source
   position from the destination position, 0 if every limitation sees
   them at the same relative position (before/inside/after).  */
static int
check_dst_limits (mctx, limits, dst_node, dst_idx, src_node, src_idx)
     re_match_context_t *mctx;
     re_node_set *limits;
     int dst_node, dst_idx, src_node, src_idx;
{
  re_dfa_t *const dfa = mctx->dfa;
  int lim_idx, src_pos, dst_pos;

  /* Look up the backreference cache entries for both positions once;
     they are reused for every limitation in the loop below.  */
  int dst_bkref_idx = search_cur_bkref_entry (mctx, dst_idx);
  int src_bkref_idx = search_cur_bkref_entry (mctx, src_idx);
  for (lim_idx = 0; lim_idx < limits->nelem; ++lim_idx)
    {
      int subexp_idx;
      struct re_backref_cache_entry *ent;
      ent = mctx->bkref_ents + limits->elems[lim_idx];
      subexp_idx = dfa->nodes[ent->node].opr.idx;

      /* Relative position of each endpoint w.r.t. the limited
	 subexpression: -1 = before, 0 = inside, 1 = after.  */
      dst_pos = check_dst_limits_calc_pos (mctx, limits->elems[lim_idx],
					   subexp_idx, dst_node, dst_idx,
					   dst_bkref_idx);
      src_pos = check_dst_limits_calc_pos (mctx, limits->elems[lim_idx],
					   subexp_idx, src_node, src_idx,
					   src_bkref_idx);

      /* In case of:
	 <src> <dst> ( <subexp> )
	 ( <subexp> ) <src> <dst>
	 ( <subexp1> <src> <subexp2> <dst> <subexp3> ) */
      if (src_pos == dst_pos)
	continue; /* This is unrelated limitation.  */
      else
	return 1;
    }
  return 0;
}
  1722. static int
  1723. check_dst_limits_calc_pos_1 (mctx, boundaries, subexp_idx, from_node, bkref_idx)
  1724. re_match_context_t *mctx;
  1725. int boundaries, subexp_idx, from_node, bkref_idx;
  1726. {
  1727. re_dfa_t *const dfa = mctx->dfa;
  1728. re_node_set *eclosures = dfa->eclosures + from_node;
  1729. int node_idx;
  1730. /* Else, we are on the boundary: examine the nodes on the epsilon
  1731. closure. */
  1732. for (node_idx = 0; node_idx < eclosures->nelem; ++node_idx)
  1733. {
  1734. int node = eclosures->elems[node_idx];
  1735. switch (dfa->nodes[node].type)
  1736. {
  1737. case OP_BACK_REF:
  1738. if (bkref_idx != -1)
  1739. {
  1740. struct re_backref_cache_entry *ent = mctx->bkref_ents + bkref_idx;
  1741. do
  1742. {
  1743. int dst, cpos;
  1744. if (ent->node != node)
  1745. continue;
  1746. if (subexp_idx <= 8 * sizeof (ent->eps_reachable_subexps_map)
  1747. && !(ent->eps_reachable_subexps_map & (1 << subexp_idx)))
  1748. continue;
  1749. /* Recurse trying to reach the OP_OPEN_SUBEXP and
  1750. OP_CLOSE_SUBEXP cases below. But, if the
  1751. destination node is the same node as the source
  1752. node, don't recurse because it would cause an
  1753. infinite loop: a regex that exhibits this behavior
  1754. is ()\1*\1* */
  1755. dst = dfa->edests[node].elems[0];
  1756. if (dst == from_node)
  1757. {
  1758. if (boundaries & 1)
  1759. return -1;
  1760. else /* if (boundaries & 2) */
  1761. return 0;
  1762. }
  1763. cpos =
  1764. check_dst_limits_calc_pos_1 (mctx, boundaries, subexp_idx,
  1765. dst, bkref_idx);
  1766. if (cpos == -1 /* && (boundaries & 1) */)
  1767. return -1;
  1768. if (cpos == 0 && (boundaries & 2))
  1769. return 0;
  1770. ent->eps_reachable_subexps_map &= ~(1 << subexp_idx);
  1771. }
  1772. while (ent++->more);
  1773. }
  1774. break;
  1775. case OP_OPEN_SUBEXP:
  1776. if ((boundaries & 1) && subexp_idx == dfa->nodes[node].opr.idx)
  1777. return -1;
  1778. break;
  1779. case OP_CLOSE_SUBEXP:
  1780. if ((boundaries & 2) && subexp_idx == dfa->nodes[node].opr.idx)
  1781. return 0;
  1782. break;
  1783. default:
  1784. break;
  1785. }
  1786. }
  1787. return (boundaries & 2) ? 1 : 0;
  1788. }
  1789. static int
  1790. check_dst_limits_calc_pos (mctx, limit, subexp_idx, from_node, str_idx, bkref_idx)
  1791. re_match_context_t *mctx;
  1792. int limit, subexp_idx, from_node, str_idx, bkref_idx;
  1793. {
  1794. struct re_backref_cache_entry *lim = mctx->bkref_ents + limit;
  1795. int boundaries;
  1796. /* If we are outside the range of the subexpression, return -1 or 1. */
  1797. if (str_idx < lim->subexp_from)
  1798. return -1;
  1799. if (lim->subexp_to < str_idx)
  1800. return 1;
  1801. /* If we are within the subexpression, return 0. */
  1802. boundaries = (str_idx == lim->subexp_from);
  1803. boundaries |= (str_idx == lim->subexp_to) << 1;
  1804. if (boundaries == 0)
  1805. return 0;
  1806. /* Else, examine epsilon closure. */
  1807. return check_dst_limits_calc_pos_1 (mctx, boundaries, subexp_idx,
  1808. from_node, bkref_idx);
  1809. }
/* Check the limitations of sub expressions LIMITS, and remove the nodes
   which are against limitations from DEST_NODES. */
static reg_errcode_t
check_subexp_limits (dfa, dest_nodes, candidates, limits, bkref_ents, str_idx)
     re_dfa_t *dfa;
     re_node_set *dest_nodes;
     const re_node_set *candidates;
     re_node_set *limits;
     struct re_backref_cache_entry *bkref_ents;
     int str_idx;
{
  reg_errcode_t err;
  int node_idx, lim_idx;

  for (lim_idx = 0; lim_idx < limits->nelem; ++lim_idx)
    {
      int subexp_idx;
      struct re_backref_cache_entry *ent;
      ent = bkref_ents + limits->elems[lim_idx];

      /* Skip limitations whose subexpression span does not cover
	 STR_IDX.  */
      if (str_idx <= ent->subexp_from || ent->str_idx < str_idx)
	continue; /* This is unrelated limitation. */

      subexp_idx = dfa->nodes[ent->node].opr.idx;
      if (ent->subexp_to == str_idx)
	{
	  /* STR_IDX is exactly the closing boundary: locate the '('
	     and ')' nodes of this subexpression in DEST_NODES.  */
	  int ops_node = -1;
	  int cls_node = -1;
	  for (node_idx = 0; node_idx < dest_nodes->nelem; ++node_idx)
	    {
	      int node = dest_nodes->elems[node_idx];
	      re_token_type_t type = dfa->nodes[node].type;
	      if (type == OP_OPEN_SUBEXP
		  && subexp_idx == dfa->nodes[node].opr.idx)
		ops_node = node;
	      else if (type == OP_CLOSE_SUBEXP
		       && subexp_idx == dfa->nodes[node].opr.idx)
		cls_node = node;
	    }

	  /* Check the limitation of the open subexpression. */
	  /* Note that (ent->subexp_to = str_idx != ent->subexp_from). */
	  if (ops_node >= 0)
	    {
	      err = sub_epsilon_src_nodes (dfa, ops_node, dest_nodes,
					   candidates);
	      if (BE (err != REG_NOERROR, 0))
		return err;
	    }

	  /* Check the limitation of the close subexpression. */
	  if (cls_node >= 0)
	    for (node_idx = 0; node_idx < dest_nodes->nelem; ++node_idx)
	      {
		int node = dest_nodes->elems[node_idx];
		if (!re_node_set_contains (dfa->inveclosures + node,
					   cls_node)
		    && !re_node_set_contains (dfa->eclosures + node,
					      cls_node))
		  {
		    /* It is against this limitation.
		       Remove it form the current sifted state. */
		    err = sub_epsilon_src_nodes (dfa, node, dest_nodes,
						 candidates);
		    if (BE (err != REG_NOERROR, 0))
		      return err;
		    /* Removal shifted the remaining elements down;
		       re-examine the same index.  */
		    --node_idx;
		  }
	      }
	}
      else /* (ent->subexp_to != str_idx) */
	{
	  /* STR_IDX is strictly inside the subexpression's span:
	     neither its '(' nor its ')' node may survive here.  */
	  for (node_idx = 0; node_idx < dest_nodes->nelem; ++node_idx)
	    {
	      int node = dest_nodes->elems[node_idx];
	      re_token_type_t type = dfa->nodes[node].type;
	      if (type == OP_CLOSE_SUBEXP || type == OP_OPEN_SUBEXP)
		{
		  if (subexp_idx != dfa->nodes[node].opr.idx)
		    continue;
		  /* It is against this limitation.
		     Remove it form the current sifted state. */
		  err = sub_epsilon_src_nodes (dfa, node, dest_nodes,
					       candidates);
		  if (BE (err != REG_NOERROR, 0))
		    return err;
		}
	    }
	}
    }
  return REG_NOERROR;
}
/* Sift the backreference candidates at STR_IDX: for each backreference
   cache entry registered at this position, verify by a recursive
   backward sift that the transition it induces is consistent, and, when
   requested, merge the surviving states into SCTX->limited_states.  */
static reg_errcode_t
sift_states_bkref (mctx, sctx, str_idx, candidates)
     re_match_context_t *mctx;
     re_sift_context_t *sctx;
     int str_idx;
     const re_node_set *candidates;
{
  re_dfa_t *const dfa = mctx->dfa;
  reg_errcode_t err;
  int node_idx, node;
  re_sift_context_t local_sctx;
  int first_idx = search_cur_bkref_entry (mctx, str_idx);

  /* No backreference cache entry at STR_IDX: nothing to sift.  */
  if (first_idx == -1)
    return REG_NOERROR;

  local_sctx.sifted_states = NULL; /* Mark that it hasn't been initialized. */

  for (node_idx = 0; node_idx < candidates->nelem; ++node_idx)
    {
      int enabled_idx;
      re_token_type_t type;
      struct re_backref_cache_entry *entry;
      node = candidates->elems[node_idx];
      type = dfa->nodes[node].type;
      /* Avoid infinite loop for the REs like "()\1+". */
      if (node == sctx->last_node && str_idx == sctx->last_str_idx)
	continue;
      if (type != OP_BACK_REF)
	continue;

      entry = mctx->bkref_ents + first_idx;
      enabled_idx = first_idx;
      /* Walk the chain of cache entries at this string position.  */
      do
	{
	  int subexp_len, to_idx, dst_node;
	  re_dfastate_t *cur_state;

	  if (entry->node != node)
	    continue;
	  subexp_len = entry->subexp_to - entry->subexp_from;
	  to_idx = str_idx + subexp_len;
	  /* An empty referenced subexpression epsilon-transits via
	     edests; otherwise follow the ordinary next node.  */
	  dst_node = (subexp_len ? dfa->nexts[node]
		      : dfa->edests[node].elems[0]);

	  /* Skip entries whose destination is out of range, already
	     sifted away, or against an existing limitation.  */
	  if (to_idx > sctx->last_str_idx
	      || sctx->sifted_states[to_idx] == NULL
	      || !STATE_NODE_CONTAINS (sctx->sifted_states[to_idx], dst_node)
	      || check_dst_limits (mctx, &sctx->limits, node,
				   str_idx, dst_node, to_idx))
	    continue;

	  /* Lazily clone SCTX the first time an entry survives.  */
	  if (local_sctx.sifted_states == NULL)
	    {
	      local_sctx = *sctx;
	      err = re_node_set_init_copy (&local_sctx.limits, &sctx->limits);
	      if (BE (err != REG_NOERROR, 0))
		goto free_return;
	    }
	  local_sctx.last_node = node;
	  local_sctx.last_str_idx = str_idx;
	  err = re_node_set_insert (&local_sctx.limits, enabled_idx);
	  if (BE (err < 0, 0))
	    {
	      err = REG_ESPACE;
	      goto free_return;
	    }
	  cur_state = local_sctx.sifted_states[str_idx];
	  err = sift_states_backward (mctx, &local_sctx);
	  if (BE (err != REG_NOERROR, 0))
	    goto free_return;

	  if (sctx->limited_states != NULL)
	    {
	      err = merge_state_array (dfa, sctx->limited_states,
				       local_sctx.sifted_states,
				       str_idx + 1);
	      if (BE (err != REG_NOERROR, 0))
		goto free_return;
	    }
	  /* Undo the temporary changes made for the recursive sift.  */
	  local_sctx.sifted_states[str_idx] = cur_state;
	  re_node_set_remove (&local_sctx.limits, enabled_idx);

	  /* mctx->bkref_ents may have changed, reload the pointer. */
	  entry = mctx->bkref_ents + enabled_idx;
	}
      while (enabled_idx++, entry++->more);
    }
  err = REG_NOERROR;
 free_return:
  if (local_sctx.sifted_states != NULL)
    {
      re_node_set_free (&local_sctx.limits);
    }
  return err;
}
  1984. #ifdef RE_ENABLE_I18N
  1985. static int
  1986. sift_states_iter_mb (mctx, sctx, node_idx, str_idx, max_str_idx)
  1987. const re_match_context_t *mctx;
  1988. re_sift_context_t *sctx;
  1989. int node_idx, str_idx, max_str_idx;
  1990. {
  1991. re_dfa_t *const dfa = mctx->dfa;
  1992. int naccepted;
  1993. /* Check the node can accept `multi byte'. */
  1994. naccepted = check_node_accept_bytes (dfa, node_idx, &mctx->input, str_idx);
  1995. if (naccepted > 0 && str_idx + naccepted <= max_str_idx &&
  1996. !STATE_NODE_CONTAINS (sctx->sifted_states[str_idx + naccepted],
  1997. dfa->nexts[node_idx]))
  1998. /* The node can't accept the `multi byte', or the
  1999. destination was already thrown away, then the node
  2000. could't accept the current input `multi byte'. */
  2001. naccepted = 0;
  2002. /* Otherwise, it is sure that the node could accept
  2003. `naccepted' bytes input. */
  2004. return naccepted;
  2005. }
  2006. #endif /* RE_ENABLE_I18N */
/* Functions for state transition.  */

/* Return the next state to which the current state STATE will transit by
   accepting the current input byte, and update STATE_LOG if necessary.
   If STATE can accept a multibyte char/collating element/back reference
   update the destination of STATE_LOG. */
static re_dfastate_t *
transit_state (err, mctx, state)
     reg_errcode_t *err;
     re_match_context_t *mctx;
     re_dfastate_t *state;
{
  re_dfastate_t **trtable;
  unsigned char ch;

#ifdef RE_ENABLE_I18N
  /* If the current state can accept multibyte. */
  if (BE (state->accept_mb, 0))
    {
      *err = transit_state_mb (mctx, state);
      if (BE (*err != REG_NOERROR, 0))
	return NULL;
    }
#endif /* RE_ENABLE_I18N */

  /* Then decide the next state with the single byte. */
#if 0
  if (0)
    /* don't use transition table */
    return transit_state_sb (err, mctx, state);
#endif

  /* Use transition table  */
  ch = re_string_fetch_byte (&mctx->input);
  for (;;)
    {
      /* Fast path: a context-independent transition table exists.  */
      trtable = state->trtable;
      if (BE (trtable != NULL, 1))
	return trtable[ch];

      /* Word-sensitive table: the first SBC_MAX entries are for
	 non-word context, the second SBC_MAX for word context.  */
      trtable = state->word_trtable;
      if (BE (trtable != NULL, 1))
	{
	  unsigned int context;
	  context
	    = re_string_context_at (&mctx->input,
				    re_string_cur_idx (&mctx->input) - 1,
				    mctx->eflags);
	  if (IS_WORD_CONTEXT (context))
	    return trtable[ch + SBC_MAX];
	  else
	    return trtable[ch];
	}

      /* No table yet: build one, then loop to use it.  */
      if (!build_trtable (mctx->dfa, state))
	{
	  *err = REG_ESPACE;
	  return NULL;
	}

      /* Retry, we now have a transition table. */
    }
}
/* Update the state_log if we need.  Merge NEXT_STATE (the result of the
   single-byte transition, possibly NULL) with whatever state the log
   already records at the current string index, and run the
   backreference bookkeeping on the merged state.  Return the merged
   state, or NULL on error with *ERR set.  */
re_dfastate_t *
merge_state_with_log (err, mctx, next_state)
     reg_errcode_t *err;
     re_match_context_t *mctx;
     re_dfastate_t *next_state;
{
  re_dfa_t *const dfa = mctx->dfa;
  int cur_idx = re_string_cur_idx (&mctx->input);

  if (cur_idx > mctx->state_log_top)
    {
      /* Nothing logged this far yet: store and raise the mark.  */
      mctx->state_log[cur_idx] = next_state;
      mctx->state_log_top = cur_idx;
    }
  else if (mctx->state_log[cur_idx] == 0)
    {
      mctx->state_log[cur_idx] = next_state;
    }
  else
    {
      re_dfastate_t *pstate;
      unsigned int context;
      re_node_set next_nodes, *log_nodes, *table_nodes = NULL;
      /* If (state_log[cur_idx] != 0), it implies that cur_idx is
         the destination of a multibyte char/collating element/
         back reference.  Then the next state is the union set of
         these destinations and the results of the transition table. */
      pstate = mctx->state_log[cur_idx];
      log_nodes = pstate->entrance_nodes;
      if (next_state != NULL)
        {
          table_nodes = next_state->entrance_nodes;
          *err = re_node_set_init_union (&next_nodes, table_nodes,
					 log_nodes);
          if (BE (*err != REG_NOERROR, 0))
	    return NULL;
        }
      else
	/* NEXT_NODES aliases the logged set; must not be freed.  */
        next_nodes = *log_nodes;
      /* Note: We already add the nodes of the initial state,
	 then we don't need to add them here. */

      context = re_string_context_at (&mctx->input,
				      re_string_cur_idx (&mctx->input) - 1,
				      mctx->eflags);
      next_state = mctx->state_log[cur_idx]
        = re_acquire_state_context (err, dfa, &next_nodes, context);
      /* We don't need to check errors here, since the return value of
         this function is next_state and ERR is already set. */

      /* Free NEXT_NODES only when it owns storage (the union case).  */
      if (table_nodes != NULL)
        re_node_set_free (&next_nodes);
    }

  if (BE (dfa->nbackref, 0) && next_state != NULL)
    {
      /* Check OP_OPEN_SUBEXP in the current state in case that we use them
	 later.  We must check them here, since the back references in the
	 next state might use them.  */
      *err = check_subexp_matching_top (mctx, &next_state->nodes,
					cur_idx);
      if (BE (*err != REG_NOERROR, 0))
	return NULL;

      /* If the next state has back references.  */
      if (next_state->has_backref)
	{
	  *err = transit_state_bkref (mctx, &next_state->nodes);
	  if (BE (*err != REG_NOERROR, 0))
	    return NULL;
	  /* transit_state_bkref may have replaced the logged state.  */
	  next_state = mctx->state_log[cur_idx];
	}
    }

  return next_state;
}
  2134. /* Skip bytes in the input that correspond to part of a
  2135. multi-byte match, then look in the log for a state
  2136. from which to restart matching. */
  2137. re_dfastate_t *
  2138. find_recover_state (err, mctx)
  2139. reg_errcode_t *err;
  2140. re_match_context_t *mctx;
  2141. {
  2142. re_dfastate_t *cur_state = NULL;
  2143. do
  2144. {
  2145. int max = mctx->state_log_top;
  2146. int cur_str_idx = re_string_cur_idx (&mctx->input);
  2147. do
  2148. {
  2149. if (++cur_str_idx > max)
  2150. return NULL;
  2151. re_string_skip_bytes (&mctx->input, 1);
  2152. }
  2153. while (mctx->state_log[cur_str_idx] == NULL);
  2154. cur_state = merge_state_with_log (err, mctx, NULL);
  2155. }
  2156. while (err == REG_NOERROR && cur_state == NULL);
  2157. return cur_state;
  2158. }
  2159. /* Helper functions for transit_state. */
  2160. /* From the node set CUR_NODES, pick up the nodes whose types are
  2161. OP_OPEN_SUBEXP and which have corresponding back references in the regular
  2162. expression. And register them to use them later for evaluating the
  2163. correspoding back references. */
  2164. static reg_errcode_t
  2165. check_subexp_matching_top (mctx, cur_nodes, str_idx)
  2166. re_match_context_t *mctx;
  2167. re_node_set *cur_nodes;
  2168. int str_idx;
  2169. {
  2170. re_dfa_t *const dfa = mctx->dfa;
  2171. int node_idx;
  2172. reg_errcode_t err;
  2173. /* TODO: This isn't efficient.
  2174. Because there might be more than one nodes whose types are
  2175. OP_OPEN_SUBEXP and whose index is SUBEXP_IDX, we must check all
  2176. nodes.
  2177. E.g. RE: (a){2} */
  2178. for (node_idx = 0; node_idx < cur_nodes->nelem; ++node_idx)
  2179. {
  2180. int node = cur_nodes->elems[node_idx];
  2181. if (dfa->nodes[node].type == OP_OPEN_SUBEXP
  2182. && dfa->nodes[node].opr.idx < (8 * sizeof (dfa->used_bkref_map))
  2183. && dfa->used_bkref_map & (1 << dfa->nodes[node].opr.idx))
  2184. {
  2185. err = match_ctx_add_subtop (mctx, node, str_idx);
  2186. if (BE (err != REG_NOERROR, 0))
  2187. return err;
  2188. }
  2189. }
  2190. return REG_NOERROR;
  2191. }
#if 0
/* Return the next state to which the current state STATE will transit by
   accepting the current input byte.  (Compiled out: the transition-table
   path in transit_state is used instead.)  */
static re_dfastate_t *
transit_state_sb (err, mctx, state)
     reg_errcode_t *err;
     re_match_context_t *mctx;
     re_dfastate_t *state;
{
  re_dfa_t *const dfa = mctx->dfa;
  re_node_set next_nodes;
  re_dfastate_t *next_state;
  int node_cnt, cur_str_idx = re_string_cur_idx (&mctx->input);
  unsigned int context;

  *err = re_node_set_alloc (&next_nodes, state->nodes.nelem + 1);
  if (BE (*err != REG_NOERROR, 0))
    return NULL;
  for (node_cnt = 0; node_cnt < state->nodes.nelem; ++node_cnt)
    {
      int cur_node = state->nodes.elems[node_cnt];
      if (check_node_accept (mctx, dfa->nodes + cur_node, cur_str_idx))
	{
	  /* The node accepts the current byte: the epsilon closure of
	     its next node joins the next state's node set.  */
	  *err = re_node_set_merge (&next_nodes,
				    dfa->eclosures + dfa->nexts[cur_node]);
	  if (BE (*err != REG_NOERROR, 0))
	    {
	      re_node_set_free (&next_nodes);
	      return NULL;
	    }
	}
    }
  context = re_string_context_at (&mctx->input, cur_str_idx, mctx->eflags);
  next_state = re_acquire_state_context (err, dfa, &next_nodes, context);
  /* We don't need to check errors here, since the return value of
     this function is next_state and ERR is already set. */

  re_node_set_free (&next_nodes);
  re_string_skip_bytes (&mctx->input, 1);
  return next_state;
}
#endif
#ifdef RE_ENABLE_I18N
/* For each multibyte-accepting node of PSTATE, compute how many bytes
   it can consume from the current input position and merge the epsilon
   closure of its next node into the state-log entry at the resulting
   destination index.  */
static reg_errcode_t
transit_state_mb (mctx, pstate)
     re_match_context_t *mctx;
     re_dfastate_t *pstate;
{
  re_dfa_t *const dfa = mctx->dfa;
  reg_errcode_t err;
  int i;

  for (i = 0; i < pstate->nodes.nelem; ++i)
    {
      re_node_set dest_nodes, *new_nodes;
      int cur_node_idx = pstate->nodes.elems[i];
      int naccepted, dest_idx;
      unsigned int context;
      re_dfastate_t *dest_state;

      /* Only nodes that can accept a multibyte sequence matter.  */
      if (!dfa->nodes[cur_node_idx].accept_mb)
	continue;

      if (dfa->nodes[cur_node_idx].constraint)
	{
	  context = re_string_context_at (&mctx->input,
					  re_string_cur_idx (&mctx->input),
					  mctx->eflags);
	  if (NOT_SATISFY_NEXT_CONSTRAINT (dfa->nodes[cur_node_idx].constraint,
					   context))
	    continue;
	}

      /* How many bytes the node can accept?  */
      naccepted = check_node_accept_bytes (dfa, cur_node_idx, &mctx->input,
					   re_string_cur_idx (&mctx->input));
      if (naccepted == 0)
	continue;

      /* The node can accepts `naccepted' bytes.  */
      dest_idx = re_string_cur_idx (&mctx->input) + naccepted;
      /* Track the longest multibyte element seen; other code sizes
	 buffers from this value.  */
      mctx->max_mb_elem_len = ((mctx->max_mb_elem_len < naccepted) ? naccepted
			       : mctx->max_mb_elem_len);
      err = clean_state_log_if_needed (mctx, dest_idx);
      if (BE (err != REG_NOERROR, 0))
	return err;
#ifdef DEBUG
      assert (dfa->nexts[cur_node_idx] != -1);
#endif
      new_nodes = dfa->eclosures + dfa->nexts[cur_node_idx];

      dest_state = mctx->state_log[dest_idx];
      if (dest_state == NULL)
	/* DEST_NODES aliases the eclosure set; must not be freed.  */
	dest_nodes = *new_nodes;
      else
	{
	  err = re_node_set_init_union (&dest_nodes,
					dest_state->entrance_nodes, new_nodes);
	  if (BE (err != REG_NOERROR, 0))
	    return err;
	}
      context = re_string_context_at (&mctx->input, dest_idx - 1, mctx->eflags);
      mctx->state_log[dest_idx]
	= re_acquire_state_context (&err, dfa, &dest_nodes, context);
      /* Free DEST_NODES only if it owns storage (the union case).  */
      if (dest_state != NULL)
	re_node_set_free (&dest_nodes);
      if (BE (mctx->state_log[dest_idx] == NULL && err != REG_NOERROR, 0))
	return err;
    }
  return REG_NOERROR;
}
#endif /* RE_ENABLE_I18N */
/* For each OP_BACK_REF node in NODES, enumerate the candidate substrings
   it can match at the current position (via get_subexp) and merge the
   resulting destination node sets into the state log.  Recurses when an
   empty backreference match makes further epsilon transitions
   possible.  */
static reg_errcode_t
transit_state_bkref (mctx, nodes)
     re_match_context_t *mctx;
     const re_node_set *nodes;
{
  re_dfa_t *const dfa = mctx->dfa;
  reg_errcode_t err;
  int i;
  int cur_str_idx = re_string_cur_idx (&mctx->input);

  for (i = 0; i < nodes->nelem; ++i)
    {
      int dest_str_idx, prev_nelem, bkc_idx;
      int node_idx = nodes->elems[i];
      unsigned int context;
      const re_token_t *node = dfa->nodes + node_idx;
      re_node_set *new_dest_nodes;

      /* Check whether `node' is a backreference or not. */
      if (node->type != OP_BACK_REF)
	continue;

      if (node->constraint)
	{
	  context = re_string_context_at (&mctx->input, cur_str_idx,
					  mctx->eflags);
	  if (NOT_SATISFY_NEXT_CONSTRAINT (node->constraint, context))
	    continue;
	}

      /* `node' is a backreference.
	 Check the substring which the substring matched. */
      bkc_idx = mctx->nbkref_ents;
      err = get_subexp (mctx, node_idx, cur_str_idx);
      if (BE (err != REG_NOERROR, 0))
	goto free_return;

      /* And add the epsilon closures (which is `new_dest_nodes') of
	 the backreference to appropriate state_log.  */
#ifdef DEBUG
      assert (dfa->nexts[node_idx] != -1);
#endif
      /* Iterate over the cache entries get_subexp appended above.  */
      for (; bkc_idx < mctx->nbkref_ents; ++bkc_idx)
	{
	  int subexp_len;
	  re_dfastate_t *dest_state;
	  struct re_backref_cache_entry *bkref_ent;
	  bkref_ent = mctx->bkref_ents + bkc_idx;
	  if (bkref_ent->node != node_idx || bkref_ent->str_idx != cur_str_idx)
	    continue;
	  subexp_len = bkref_ent->subexp_to - bkref_ent->subexp_from;
	  /* An empty referenced subexpression epsilon-transits via
	     edests; otherwise follow the ordinary next node.  */
	  new_dest_nodes = (subexp_len == 0
			    ? dfa->eclosures + dfa->edests[node_idx].elems[0]
			    : dfa->eclosures + dfa->nexts[node_idx]);
	  dest_str_idx = (cur_str_idx + bkref_ent->subexp_to
			  - bkref_ent->subexp_from);
	  context = re_string_context_at (&mctx->input, dest_str_idx - 1,
					  mctx->eflags);
	  dest_state = mctx->state_log[dest_str_idx];
	  prev_nelem = ((mctx->state_log[cur_str_idx] == NULL) ? 0
			: mctx->state_log[cur_str_idx]->nodes.nelem);
	  /* Add `new_dest_node' to state_log.  */
	  if (dest_state == NULL)
	    {
	      mctx->state_log[dest_str_idx]
		= re_acquire_state_context (&err, dfa, new_dest_nodes,
					    context);
	      if (BE (mctx->state_log[dest_str_idx] == NULL
		      && err != REG_NOERROR, 0))
		goto free_return;
	    }
	  else
	    {
	      re_node_set dest_nodes;
	      err = re_node_set_init_union (&dest_nodes,
					    dest_state->entrance_nodes,
					    new_dest_nodes);
	      if (BE (err != REG_NOERROR, 0))
		{
		  re_node_set_free (&dest_nodes);
		  goto free_return;
		}
	      mctx->state_log[dest_str_idx]
		= re_acquire_state_context (&err, dfa, &dest_nodes, context);
	      re_node_set_free (&dest_nodes);
	      if (BE (mctx->state_log[dest_str_idx] == NULL
		      && err != REG_NOERROR, 0))
		goto free_return;
	    }
	  /* We need to check recursively if the backreference can epsilon
	     transit.  */
	  if (subexp_len == 0
	      && mctx->state_log[cur_str_idx]->nodes.nelem > prev_nelem)
	    {
	      err = check_subexp_matching_top (mctx, new_dest_nodes,
					       cur_str_idx);
	      if (BE (err != REG_NOERROR, 0))
		goto free_return;
	      err = transit_state_bkref (mctx, new_dest_nodes);
	      if (BE (err != REG_NOERROR, 0))
		goto free_return;
	    }
	}
    }
  err = REG_NOERROR;
 free_return:
  return err;
}
/* Enumerate all the candidates which the backreference BKREF_NODE can match
   at BKREF_STR_IDX, and register them by match_ctx_add_entry().
   Note that we might collect inappropriate candidates here.
   However, the cost of checking them strictly here is too high, then we
   delay these checking for prune_impossible_nodes(). */
static reg_errcode_t
get_subexp (mctx, bkref_node, bkref_str_idx)
     re_match_context_t *mctx;
     int bkref_node, bkref_str_idx;
{
  re_dfa_t *const dfa = mctx->dfa;
  int subexp_num, sub_top_idx;
  const char *buf = (const char *) re_string_get_buffer (&mctx->input);
  /* Return if we have already checked BKREF_NODE at BKREF_STR_IDX. */
  int cache_idx = search_cur_bkref_entry (mctx, bkref_str_idx);
  if (cache_idx != -1)
    {
      const struct re_backref_cache_entry *entry = mctx->bkref_ents + cache_idx;
      do
        if (entry->node == bkref_node)
	  return REG_NOERROR; /* We already checked it. */
      while (entry++->more);
    }

  subexp_num = dfa->nodes[bkref_node].opr.idx;

  /* For each sub expression */
  for (sub_top_idx = 0; sub_top_idx < mctx->nsub_tops; ++sub_top_idx)
    {
      reg_errcode_t err;
      re_sub_match_top_t *sub_top = mctx->sub_tops[sub_top_idx];
      re_sub_match_last_t *sub_last;
      int sub_last_idx, sl_str, bkref_str_off;

      if (dfa->nodes[sub_top->node].opr.idx != subexp_num)
	continue; /* It isn't related. */

      sl_str = sub_top->str_idx;
      bkref_str_off = bkref_str_idx;
      /* At first, check the last node of sub expressions we already
	 evaluated. */
      for (sub_last_idx = 0; sub_last_idx < sub_top->nlasts; ++sub_last_idx)
	{
	  int sl_str_diff;
	  sub_last = sub_top->lasts[sub_last_idx];
	  sl_str_diff = sub_last->str_idx - sl_str;
	  /* The matched string by the sub expression match with the substring
	     at the back reference? */
	  if (sl_str_diff > 0)
	    {
	      if (BE (bkref_str_off + sl_str_diff > mctx->input.valid_len, 0))
		{
		  /* Not enough chars for a successful match.  */
		  if (bkref_str_off + sl_str_diff > mctx->input.len)
		    break;

		  err = clean_state_log_if_needed (mctx,
						   bkref_str_off
						   + sl_str_diff);
		  if (BE (err != REG_NOERROR, 0))
		    return err;
		  /* The call above may have reallocated the input
		     buffer; refresh our cached pointer.  */
		  buf = (const char *) re_string_get_buffer (&mctx->input);
		}
	      if (memcmp (buf + bkref_str_off, buf + sl_str, sl_str_diff) != 0)
		break; /* We don't need to search this sub expression any more. */
	    }
	  bkref_str_off += sl_str_diff;
	  sl_str += sl_str_diff;
	  err = get_subexp_sub (mctx, sub_top, sub_last, bkref_node,
				bkref_str_idx);

	  /* Reload buf, since the preceding call might have reallocated
	     the buffer.  */
	  buf = (const char *) re_string_get_buffer (&mctx->input);

	  if (err == REG_NOMATCH)
	    continue;
	  if (BE (err != REG_NOERROR, 0))
	    return err;
	}

      /* A break above means the substring comparison failed; give up
	 on this sub expression entirely.  */
      if (sub_last_idx < sub_top->nlasts)
	continue;
      if (sub_last_idx > 0)
	++sl_str;
      /* Then, search for the other last nodes of the sub expression. */
      for (; sl_str <= bkref_str_idx; ++sl_str)
	{
	  int cls_node, sl_str_off;
	  const re_node_set *nodes;
	  sl_str_off = sl_str - sub_top->str_idx;
	  /* The matched string by the sub expression match with the substring
	     at the back reference? */
	  if (sl_str_off > 0)
	    {
	      if (BE (bkref_str_off >= mctx->input.valid_len, 0))
		{
		  /* If we are at the end of the input, we cannot match. */
		  if (bkref_str_off >= mctx->input.len)
		    break;

		  err = extend_buffers (mctx);
		  if (BE (err != REG_NOERROR, 0))
		    return err;

		  /* extend_buffers may reallocate; refresh pointer.  */
		  buf = (const char *) re_string_get_buffer (&mctx->input);
		}
	      /* Incremental byte-by-byte comparison of the candidate
		 substring against the text at the back reference.  */
	      if (buf [bkref_str_off++] != buf[sl_str - 1])
		break; /* We don't need to search this sub expression
			  any more. */
	    }
	  if (mctx->state_log[sl_str] == NULL)
	    continue;
	  /* Does this state have a ')' of the sub expression? */
	  nodes = &mctx->state_log[sl_str]->nodes;
	  cls_node = find_subexp_node (dfa, nodes, subexp_num, OP_CLOSE_SUBEXP);
	  if (cls_node == -1)
	    continue; /* No. */
	  if (sub_top->path == NULL)
	    {
	      sub_top->path = calloc (sizeof (state_array_t),
				      sl_str - sub_top->str_idx + 1);
	      if (sub_top->path == NULL)
		return REG_ESPACE;
	    }
	  /* Can the OP_OPEN_SUBEXP node arrive the OP_CLOSE_SUBEXP node
	     in the current context? */
	  err = check_arrival (mctx, sub_top->path, sub_top->node,
			       sub_top->str_idx, cls_node, sl_str, OP_CLOSE_SUBEXP);
	  if (err == REG_NOMATCH)
	    continue;
	  if (BE (err != REG_NOERROR, 0))
	    return err;
	  sub_last = match_ctx_add_sublast (sub_top, cls_node, sl_str);
	  if (BE (sub_last == NULL, 0))
	    return REG_ESPACE;
	  err = get_subexp_sub (mctx, sub_top, sub_last, bkref_node,
				bkref_str_idx);
	  if (err == REG_NOMATCH)
	    continue;
	}
    }
  return REG_NOERROR;
}
  2533. /* Helper functions for get_subexp(). */
  2534. /* Check SUB_LAST can arrive to the back reference BKREF_NODE at BKREF_STR.
  2535. If it can arrive, register the sub expression expressed with SUB_TOP
  2536. and SUB_LAST. */
  2537. static reg_errcode_t
  2538. get_subexp_sub (mctx, sub_top, sub_last, bkref_node, bkref_str)
  2539. re_match_context_t *mctx;
  2540. const re_sub_match_top_t *sub_top;
  2541. re_sub_match_last_t *sub_last;
  2542. int bkref_node, bkref_str;
  2543. {
  2544. reg_errcode_t err;
  2545. int to_idx;
  2546. /* Can the subexpression arrive the back reference? */
  2547. err = check_arrival (mctx, &sub_last->path, sub_last->node,
  2548. sub_last->str_idx, bkref_node, bkref_str, OP_OPEN_SUBEXP);
  2549. if (err != REG_NOERROR)
  2550. return err;
  2551. err = match_ctx_add_entry (mctx, bkref_node, bkref_str, sub_top->str_idx,
  2552. sub_last->str_idx);
  2553. if (BE (err != REG_NOERROR, 0))
  2554. return err;
  2555. to_idx = bkref_str + sub_last->str_idx - sub_top->str_idx;
  2556. return clean_state_log_if_needed (mctx, to_idx);
  2557. }
  2558. /* Find the first node which is '(' or ')' and whose index is SUBEXP_IDX.
  2559. Search '(' if FL_OPEN, or search ')' otherwise.
  2560. TODO: This function isn't efficient...
  2561. Because there might be more than one nodes whose types are
  2562. OP_OPEN_SUBEXP and whose index is SUBEXP_IDX, we must check all
  2563. nodes.
  2564. E.g. RE: (a){2} */
  2565. static int
  2566. find_subexp_node (dfa, nodes, subexp_idx, type)
  2567. const re_dfa_t *dfa;
  2568. const re_node_set *nodes;
  2569. int subexp_idx, type;
  2570. {
  2571. int cls_idx;
  2572. for (cls_idx = 0; cls_idx < nodes->nelem; ++cls_idx)
  2573. {
  2574. int cls_node = nodes->elems[cls_idx];
  2575. const re_token_t *node = dfa->nodes + cls_node;
  2576. if (node->type == type
  2577. && node->opr.idx == subexp_idx)
  2578. return cls_node;
  2579. }
  2580. return -1;
  2581. }
/* Check whether the node TOP_NODE at TOP_STR can arrive to the node
   LAST_NODE at LAST_STR.  We record the path onto PATH since it will be
   heavily reused.
   TYPE is OP_OPEN_SUBEXP or OP_CLOSE_SUBEXP; it restricts which epsilon
   transitions may be followed (see check_arrival_expand_ecl).
   NOTE(review): the early error returns below leave MCTX->state_log and
   MCTX->input.cur_idx pointing at the temporary values set up further
   down; presumably callers abort the whole match on these errors --
   confirm before relying on MCTX afterwards.
   Return REG_NOERROR if it can arrive, or REG_NOMATCH otherwise.  */

static reg_errcode_t
check_arrival (mctx, path, top_node, top_str, last_node, last_str,
	       type)
     re_match_context_t *mctx;
     state_array_t *path;
     int top_node, top_str, last_node, last_str, type;
{
  re_dfa_t *const dfa = mctx->dfa;
  reg_errcode_t err;
  int subexp_num, backup_cur_idx, str_idx, null_cnt;
  re_dfastate_t *cur_state = NULL;
  re_node_set *cur_nodes, next_nodes;
  re_dfastate_t **backup_state_log;
  unsigned int context;

  subexp_num = dfa->nodes[top_node].opr.idx;
  /* Extend the buffer if we need.  */
  if (BE (path->alloc < last_str + mctx->max_mb_elem_len + 1, 0))
    {
      re_dfastate_t **new_array;
      int old_alloc = path->alloc;
      path->alloc += last_str + mctx->max_mb_elem_len + 1;
      new_array = re_realloc (path->array, re_dfastate_t *, path->alloc);
      if (new_array == NULL)
	{
	  /* Restore the old size; PATH->array is still the old, valid
	     buffer after a failed realloc.  */
	  path->alloc = old_alloc;
	  return REG_ESPACE;
	}
      path->array = new_array;
      /* Zero the grown tail so unused entries read as "no state".  */
      memset (new_array + old_alloc, '\0',
	      sizeof (re_dfastate_t *) * (path->alloc - old_alloc));
    }

  /* PATH->next_idx != 0 means a previous call already simulated up to
     that index; resume from there rather than from TOP_STR.  */
  str_idx = path->next_idx == 0 ? top_str : path->next_idx;

  /* Temporary modify MCTX: run the simulation against PATH->array as
     the state log; the real log is restored before the normal return.  */
  backup_state_log = mctx->state_log;
  backup_cur_idx = mctx->input.cur_idx;
  mctx->state_log = path->array;
  mctx->input.cur_idx = str_idx;

  /* Setup initial node set.  */
  context = re_string_context_at (&mctx->input, str_idx - 1, mctx->eflags);
  if (str_idx == top_str)
    {
      /* Fresh start: seed with TOP_NODE and expand its (restricted)
	 epsilon closure.  */
      err = re_node_set_init_1 (&next_nodes, top_node);
      if (BE (err != REG_NOERROR, 0))
	return err;
      err = check_arrival_expand_ecl (dfa, &next_nodes, subexp_num, type);
      if (BE (err != REG_NOERROR, 0))
	{
	  re_node_set_free (&next_nodes);
	  return err;
	}
    }
  else
    {
      /* Resuming: start from the state recorded at STR_IDX, if any.  */
      cur_state = mctx->state_log[str_idx];
      if (cur_state && cur_state->has_backref)
	{
	  err = re_node_set_init_copy (&next_nodes, &cur_state->nodes);
	  if (BE ( err != REG_NOERROR, 0))
	    return err;
	}
      else
	re_node_set_init_empty (&next_nodes);
    }
  if (str_idx == top_str || (cur_state && cur_state->has_backref))
    {
      if (next_nodes.nelem)
	{
	  /* Follow any cached back-reference transitions at STR_IDX.  */
	  err = expand_bkref_cache (mctx, &next_nodes, str_idx,
				    subexp_num, type);
	  if (BE ( err != REG_NOERROR, 0))
	    {
	      re_node_set_free (&next_nodes);
	      return err;
	    }
	}
      cur_state = re_acquire_state_context (&err, dfa, &next_nodes, context);
      if (BE (cur_state == NULL && err != REG_NOERROR, 0))
	{
	  re_node_set_free (&next_nodes);
	  return err;
	}
      mctx->state_log[str_idx] = cur_state;
    }

  /* Simulate the DFA from STR_IDX towards LAST_STR.  NULL_CNT counts
     consecutive indices with no state; once it exceeds the longest
     multibyte element length, no pending multibyte transition can still
     land, so the walk can stop early.  */
  for (null_cnt = 0; str_idx < last_str && null_cnt <= mctx->max_mb_elem_len;)
    {
      re_node_set_empty (&next_nodes);
      if (mctx->state_log[str_idx + 1])
	{
	  /* Keep nodes already deposited at the next index (e.g. by
	     earlier multibyte transitions).  */
	  err = re_node_set_merge (&next_nodes,
				   &mctx->state_log[str_idx + 1]->nodes);
	  if (BE (err != REG_NOERROR, 0))
	    {
	      re_node_set_free (&next_nodes);
	      return err;
	    }
	}
      if (cur_state)
	{
	  err = check_arrival_add_next_nodes (mctx, str_idx,
					      &cur_state->non_eps_nodes,
					      &next_nodes);
	  if (BE (err != REG_NOERROR, 0))
	    {
	      re_node_set_free (&next_nodes);
	      return err;
	    }
	}
      ++str_idx;
      if (next_nodes.nelem)
	{
	  /* Expand the closure (restricted to the subexpression) and
	     apply any cached back references at the new index.  */
	  err = check_arrival_expand_ecl (dfa, &next_nodes, subexp_num, type);
	  if (BE (err != REG_NOERROR, 0))
	    {
	      re_node_set_free (&next_nodes);
	      return err;
	    }
	  err = expand_bkref_cache (mctx, &next_nodes, str_idx,
				    subexp_num, type);
	  if (BE ( err != REG_NOERROR, 0))
	    {
	      re_node_set_free (&next_nodes);
	      return err;
	    }
	}
      context = re_string_context_at (&mctx->input, str_idx - 1, mctx->eflags);
      cur_state = re_acquire_state_context (&err, dfa, &next_nodes, context);
      if (BE (cur_state == NULL && err != REG_NOERROR, 0))
	{
	  re_node_set_free (&next_nodes);
	  return err;
	}
      mctx->state_log[str_idx] = cur_state;
      null_cnt = cur_state == NULL ? null_cnt + 1 : 0;
    }
  re_node_set_free (&next_nodes);
  cur_nodes = (mctx->state_log[last_str] == NULL ? NULL
	       : &mctx->state_log[last_str]->nodes);
  path->next_idx = str_idx;

  /* Fix MCTX: restore the real state log and input position.  */
  mctx->state_log = backup_state_log;
  mctx->input.cur_idx = backup_cur_idx;

  /* Then check the current node set has the node LAST_NODE.  */
  if (cur_nodes != NULL && re_node_set_contains (cur_nodes, last_node))
    return REG_NOERROR;
  return REG_NOMATCH;
}
/* Helper functions for check_arrival.  */

/* Calculate the destination nodes of CUR_NODES at STR_IDX, and append them
   to NEXT_NODES.  Single-byte transitions go to NEXT_NODES (the set for
   STR_IDX + 1); multibyte transitions that consume several bytes are
   written directly into MCTX->state_log at their landing index.
   TODO: This function is similar to the functions transit_state*(),
   however this function has many additional works.
   Can't we unify them?  */

static reg_errcode_t
check_arrival_add_next_nodes (mctx, str_idx, cur_nodes, next_nodes)
     re_match_context_t *mctx;
     int str_idx;
     re_node_set *cur_nodes, *next_nodes;
{
  re_dfa_t *const dfa = mctx->dfa;
  int result;
  int cur_idx;
  reg_errcode_t err;
  re_node_set union_set;

  re_node_set_init_empty (&union_set);
  for (cur_idx = 0; cur_idx < cur_nodes->nelem; ++cur_idx)
    {
      int naccepted = 0;
      int cur_node = cur_nodes->elems[cur_idx];
#ifdef DEBUG
      re_token_type_t type = dfa->nodes[cur_node].type;
      /* CUR_NODES must contain only non-epsilon nodes.  */
      assert (!IS_EPSILON_NODE (type));
#endif
#ifdef RE_ENABLE_I18N
      /* If the node may accept `multi byte'.  */
      if (dfa->nodes[cur_node].accept_mb)
	{
	  naccepted = check_node_accept_bytes (dfa, cur_node, &mctx->input,
					       str_idx);
	  if (naccepted > 1)
	    {
	      /* The node consumed NACCEPTED bytes: merge its successor
		 into whatever state is already logged at the landing
		 index STR_IDX + NACCEPTED.  */
	      re_dfastate_t *dest_state;
	      int next_node = dfa->nexts[cur_node];
	      int next_idx = str_idx + naccepted;
	      dest_state = mctx->state_log[next_idx];
	      re_node_set_empty (&union_set);
	      if (dest_state)
		{
		  err = re_node_set_merge (&union_set, &dest_state->nodes);
		  if (BE (err != REG_NOERROR, 0))
		    {
		      re_node_set_free (&union_set);
		      return err;
		    }
		}
	      result = re_node_set_insert (&union_set, next_node);
	      if (BE (result < 0, 0))
		{
		  re_node_set_free (&union_set);
		  return REG_ESPACE;
		}
	      mctx->state_log[next_idx] = re_acquire_state (&err, dfa,
							    &union_set);
	      if (BE (mctx->state_log[next_idx] == NULL
		      && err != REG_NOERROR, 0))
		{
		  re_node_set_free (&union_set);
		  return err;
		}
	    }
	}
#endif /* RE_ENABLE_I18N */
      /* If the node accepted bytes above, or it accepts the single byte
	 at STR_IDX, propagate its successor to NEXT_NODES.  */
      if (naccepted
	  || check_node_accept (mctx, dfa->nodes + cur_node, str_idx))
	{
	  result = re_node_set_insert (next_nodes, dfa->nexts[cur_node]);
	  if (BE (result < 0, 0))
	    {
	      re_node_set_free (&union_set);
	      return REG_ESPACE;
	    }
	}
    }
  re_node_set_free (&union_set);
  return REG_NOERROR;
}
  2810. /* For all the nodes in CUR_NODES, add the epsilon closures of them to
  2811. CUR_NODES, however exclude the nodes which are:
  2812. - inside the sub expression whose number is EX_SUBEXP, if FL_OPEN.
  2813. - out of the sub expression whose number is EX_SUBEXP, if !FL_OPEN.
  2814. */
  2815. static reg_errcode_t
  2816. check_arrival_expand_ecl (dfa, cur_nodes, ex_subexp, type)
  2817. re_dfa_t *dfa;
  2818. re_node_set *cur_nodes;
  2819. int ex_subexp, type;
  2820. {
  2821. reg_errcode_t err;
  2822. int idx, outside_node;
  2823. re_node_set new_nodes;
  2824. #ifdef DEBUG
  2825. assert (cur_nodes->nelem);
  2826. #endif
  2827. err = re_node_set_alloc (&new_nodes, cur_nodes->nelem);
  2828. if (BE (err != REG_NOERROR, 0))
  2829. return err;
  2830. /* Create a new node set NEW_NODES with the nodes which are epsilon
  2831. closures of the node in CUR_NODES. */
  2832. for (idx = 0; idx < cur_nodes->nelem; ++idx)
  2833. {
  2834. int cur_node = cur_nodes->elems[idx];
  2835. re_node_set *eclosure = dfa->eclosures + cur_node;
  2836. outside_node = find_subexp_node (dfa, eclosure, ex_subexp, type);
  2837. if (outside_node == -1)
  2838. {
  2839. /* There are no problematic nodes, just merge them. */
  2840. err = re_node_set_merge (&new_nodes, eclosure);
  2841. if (BE (err != REG_NOERROR, 0))
  2842. {
  2843. re_node_set_free (&new_nodes);
  2844. return err;
  2845. }
  2846. }
  2847. else
  2848. {
  2849. /* There are problematic nodes, re-calculate incrementally. */
  2850. err = check_arrival_expand_ecl_sub (dfa, &new_nodes, cur_node,
  2851. ex_subexp, type);
  2852. if (BE (err != REG_NOERROR, 0))
  2853. {
  2854. re_node_set_free (&new_nodes);
  2855. return err;
  2856. }
  2857. }
  2858. }
  2859. re_node_set_free (cur_nodes);
  2860. *cur_nodes = new_nodes;
  2861. return REG_NOERROR;
  2862. }
  2863. /* Helper function for check_arrival_expand_ecl.
  2864. Check incrementally the epsilon closure of TARGET, and if it isn't
  2865. problematic append it to DST_NODES. */
  2866. static reg_errcode_t
  2867. check_arrival_expand_ecl_sub (dfa, dst_nodes, target, ex_subexp, type)
  2868. re_dfa_t *dfa;
  2869. int target, ex_subexp, type;
  2870. re_node_set *dst_nodes;
  2871. {
  2872. int cur_node;
  2873. for (cur_node = target; !re_node_set_contains (dst_nodes, cur_node);)
  2874. {
  2875. int err;
  2876. if (dfa->nodes[cur_node].type == type
  2877. && dfa->nodes[cur_node].opr.idx == ex_subexp)
  2878. {
  2879. if (type == OP_CLOSE_SUBEXP)
  2880. {
  2881. err = re_node_set_insert (dst_nodes, cur_node);
  2882. if (BE (err == -1, 0))
  2883. return REG_ESPACE;
  2884. }
  2885. break;
  2886. }
  2887. err = re_node_set_insert (dst_nodes, cur_node);
  2888. if (BE (err == -1, 0))
  2889. return REG_ESPACE;
  2890. if (dfa->edests[cur_node].nelem == 0)
  2891. break;
  2892. if (dfa->edests[cur_node].nelem == 2)
  2893. {
  2894. err = check_arrival_expand_ecl_sub (dfa, dst_nodes,
  2895. dfa->edests[cur_node].elems[1],
  2896. ex_subexp, type);
  2897. if (BE (err != REG_NOERROR, 0))
  2898. return err;
  2899. }
  2900. cur_node = dfa->edests[cur_node].elems[0];
  2901. }
  2902. return REG_NOERROR;
  2903. }
/* For all the back references in the current state, calculate the
   destination of the back references by the appropriate entry
   in MCTX->BKREF_ENTS, adding the destinations to CUR_NODES (for
   zero-length references) or to MCTX->state_log (otherwise).  */

static reg_errcode_t
expand_bkref_cache (mctx, cur_nodes, cur_str, subexp_num,
		    type)
     re_match_context_t *mctx;
     int cur_str, subexp_num, type;
     re_node_set *cur_nodes;
{
  re_dfa_t *const dfa = mctx->dfa;
  reg_errcode_t err;
  int cache_idx_start = search_cur_bkref_entry (mctx, cur_str);
  struct re_backref_cache_entry *ent;

  /* No cached back-reference entry applies at CUR_STR.  */
  if (cache_idx_start == -1)
    return REG_NOERROR;

 restart:
  ent = mctx->bkref_ents + cache_idx_start;
  /* Walk the run of cache entries for CUR_STR; ENT->more apparently
     flags that the following array element belongs to the same string
     index.  */
  do
    {
      int to_idx, next_node;

      /* Is this entry ENT is appropriate?  */
      if (!re_node_set_contains (cur_nodes, ent->node))
	continue; /* No.  */

      to_idx = cur_str + ent->subexp_to - ent->subexp_from;
      /* Calculate the destination of the back reference, and append it
	 to MCTX->STATE_LOG.  */
      if (to_idx == cur_str)
	{
	  /* The backreference did epsilon transit, we must re-check all the
	     node in the current state.  */
	  re_node_set new_dests;
	  reg_errcode_t err2, err3;
	  next_node = dfa->edests[ent->node].elems[0];
	  /* Already present: nothing new would be added.  */
	  if (re_node_set_contains (cur_nodes, next_node))
	    continue;
	  err = re_node_set_init_1 (&new_dests, next_node);
	  err2 = check_arrival_expand_ecl (dfa, &new_dests, subexp_num, type);
	  err3 = re_node_set_merge (cur_nodes, &new_dests);
	  re_node_set_free (&new_dests);
	  if (BE (err != REG_NOERROR || err2 != REG_NOERROR
		  || err3 != REG_NOERROR, 0))
	    {
	      /* Report the first failure among the three calls.  */
	      err = (err != REG_NOERROR ? err
		     : (err2 != REG_NOERROR ? err2 : err3));
	      return err;
	    }
	  /* CUR_NODES grew, so earlier entries may now apply: rescan
	     the whole chain.  TODO: It is still inefficient...  */
	  goto restart;
	}
      else
	{
	  /* Non-epsilon transition: merge the successor of ENT->node
	     into the state logged at TO_IDX.  */
	  re_node_set union_set;
	  next_node = dfa->nexts[ent->node];
	  if (mctx->state_log[to_idx])
	    {
	      int ret;
	      /* Successor already present at TO_IDX: nothing to do.  */
	      if (re_node_set_contains (&mctx->state_log[to_idx]->nodes,
					next_node))
		continue;
	      err = re_node_set_init_copy (&union_set,
					   &mctx->state_log[to_idx]->nodes);
	      ret = re_node_set_insert (&union_set, next_node);
	      if (BE (err != REG_NOERROR || ret < 0, 0))
		{
		  re_node_set_free (&union_set);
		  err = err != REG_NOERROR ? err : REG_ESPACE;
		  return err;
		}
	    }
	  else
	    {
	      err = re_node_set_init_1 (&union_set, next_node);
	      if (BE (err != REG_NOERROR, 0))
		return err;
	    }
	  mctx->state_log[to_idx] = re_acquire_state (&err, dfa, &union_set);
	  re_node_set_free (&union_set);
	  if (BE (mctx->state_log[to_idx] == NULL
		  && err != REG_NOERROR, 0))
	    return err;
	}
    }
  while (ent++->more);
  return REG_NOERROR;
}
  2990. /* Build transition table for the state.
  2991. Return 1 if succeeded, otherwise return NULL. */
  2992. static int
  2993. build_trtable (dfa, state)
  2994. re_dfa_t *dfa;
  2995. re_dfastate_t *state;
  2996. {
  2997. reg_errcode_t err;
  2998. int i, j, ch, need_word_trtable = 0;
  2999. unsigned int elem, mask;
  3000. int dests_node_malloced = 0, dest_states_malloced = 0;
  3001. int ndests; /* Number of the destination states from `state'. */
  3002. re_dfastate_t **trtable;
  3003. re_dfastate_t **dest_states = NULL, **dest_states_word, **dest_states_nl;
  3004. re_node_set follows, *dests_node;
  3005. bitset *dests_ch;
  3006. bitset acceptable;
  3007. /* We build DFA states which corresponds to the destination nodes
  3008. from `state'. `dests_node[i]' represents the nodes which i-th
  3009. destination state contains, and `dests_ch[i]' represents the
  3010. characters which i-th destination state accepts. */
  3011. #ifdef _LIBC
  3012. if (__libc_use_alloca ((sizeof (re_node_set) + sizeof (bitset)) * SBC_MAX))
  3013. dests_node = (re_node_set *)
  3014. alloca ((sizeof (re_node_set) + sizeof (bitset)) * SBC_MAX);
  3015. else
  3016. #endif
  3017. {
  3018. dests_node = (re_node_set *)
  3019. malloc ((sizeof (re_node_set) + sizeof (bitset)) * SBC_MAX);
  3020. if (BE (dests_node == NULL, 0))
  3021. return 0;
  3022. dests_node_malloced = 1;
  3023. }
  3024. dests_ch = (bitset *) (dests_node + SBC_MAX);
  3025. /* Initialize transiton table. */
  3026. state->word_trtable = state->trtable = NULL;
  3027. /* At first, group all nodes belonging to `state' into several
  3028. destinations. */
  3029. ndests = group_nodes_into_DFAstates (dfa, state, dests_node, dests_ch);
  3030. if (BE (ndests <= 0, 0))
  3031. {
  3032. if (dests_node_malloced)
  3033. free (dests_node);
  3034. /* Return 0 in case of an error, 1 otherwise. */
  3035. if (ndests == 0)
  3036. {
  3037. state->trtable = (re_dfastate_t **)
  3038. calloc (sizeof (re_dfastate_t *), SBC_MAX);
  3039. return 1;
  3040. }
  3041. return 0;
  3042. }
  3043. err = re_node_set_alloc (&follows, ndests + 1);
  3044. if (BE (err != REG_NOERROR, 0))
  3045. goto out_free;
  3046. #ifdef _LIBC
  3047. if (__libc_use_alloca ((sizeof (re_node_set) + sizeof (bitset)) * SBC_MAX
  3048. + ndests * 3 * sizeof (re_dfastate_t *)))
  3049. dest_states = (re_dfastate_t **)
  3050. alloca (ndests * 3 * sizeof (re_dfastate_t *));
  3051. else
  3052. #endif
  3053. {
  3054. dest_states = (re_dfastate_t **)
  3055. malloc (ndests * 3 * sizeof (re_dfastate_t *));
  3056. if (BE (dest_states == NULL, 0))
  3057. {
  3058. out_free:
  3059. if (dest_states_malloced)
  3060. free (dest_states);
  3061. re_node_set_free (&follows);
  3062. for (i = 0; i < ndests; ++i)
  3063. re_node_set_free (dests_node + i);
  3064. if (dests_node_malloced)
  3065. free (dests_node);
  3066. return 0;
  3067. }
  3068. dest_states_malloced = 1;
  3069. }
  3070. dest_states_word = dest_states + ndests;
  3071. dest_states_nl = dest_states_word + ndests;
  3072. bitset_empty (acceptable);
  3073. /* Then build the states for all destinations. */
  3074. for (i = 0; i < ndests; ++i)
  3075. {
  3076. int next_node;
  3077. re_node_set_empty (&follows);
  3078. /* Merge the follows of this destination states. */
  3079. for (j = 0; j < dests_node[i].nelem; ++j)
  3080. {
  3081. next_node = dfa->nexts[dests_node[i].elems[j]];
  3082. if (next_node != -1)
  3083. {
  3084. err = re_node_set_merge (&follows, dfa->eclosures + next_node);
  3085. if (BE (err != REG_NOERROR, 0))
  3086. goto out_free;
  3087. }
  3088. }
  3089. dest_states[i] = re_acquire_state_context (&err, dfa, &follows, 0);
  3090. if (BE (dest_states[i] == NULL && err != REG_NOERROR, 0))
  3091. goto out_free;
  3092. /* If the new state has context constraint,
  3093. build appropriate states for these contexts. */
  3094. if (dest_states[i]->has_constraint)
  3095. {
  3096. dest_states_word[i] = re_acquire_state_context (&err, dfa, &follows,
  3097. CONTEXT_WORD);
  3098. if (BE (dest_states_word[i] == NULL && err != REG_NOERROR, 0))
  3099. goto out_free;
  3100. if (dest_states[i] != dest_states_word[i] && dfa->mb_cur_max > 1)
  3101. need_word_trtable = 1;
  3102. dest_states_nl[i] = re_acquire_state_context (&err, dfa, &follows,
  3103. CONTEXT_NEWLINE);
  3104. if (BE (dest_states_nl[i] == NULL && err != REG_NOERROR, 0))
  3105. goto out_free;
  3106. }
  3107. else
  3108. {
  3109. dest_states_word[i] = dest_states[i];
  3110. dest_states_nl[i] = dest_states[i];
  3111. }
  3112. bitset_merge (acceptable, dests_ch[i]);
  3113. }
  3114. if (!BE (need_word_trtable, 0))
  3115. {
  3116. /* We don't care about whether the following character is a word
  3117. character, or we are in a single-byte character set so we can
  3118. discern by looking at the character code: allocate a
  3119. 256-entry transition table. */
  3120. trtable = state->trtable =
  3121. (re_dfastate_t **) calloc (sizeof (re_dfastate_t *), SBC_MAX);
  3122. if (BE (trtable == NULL, 0))
  3123. goto out_free;
  3124. /* For all characters ch...: */
  3125. for (i = 0; i < BITSET_UINTS; ++i)
  3126. for (ch = i * UINT_BITS, elem = acceptable[i], mask = 1;
  3127. elem;
  3128. mask <<= 1, elem >>= 1, ++ch)
  3129. if (BE (elem & 1, 0))
  3130. {
  3131. /* There must be exactly one destination which accepts
  3132. character ch. See group_nodes_into_DFAstates. */
  3133. for (j = 0; (dests_ch[j][i] & mask) == 0; ++j)
  3134. ;
  3135. /* j-th destination accepts the word character ch. */
  3136. if (dfa->word_char[i] & mask)
  3137. trtable[ch] = dest_states_word[j];
  3138. else
  3139. trtable[ch] = dest_states[j];
  3140. }
  3141. }
  3142. else
  3143. {
  3144. /* We care about whether the following character is a word
  3145. character, and we are in a multi-byte character set: discern
  3146. by looking at the character code: build two 256-entry
  3147. transition tables, one starting at trtable[0] and one
  3148. starting at trtable[SBC_MAX]. */
  3149. trtable = state->word_trtable =
  3150. (re_dfastate_t **) calloc (sizeof (re_dfastate_t *), 2 * SBC_MAX);
  3151. if (BE (trtable == NULL, 0))
  3152. goto out_free;
  3153. /* For all characters ch...: */
  3154. for (i = 0; i < BITSET_UINTS; ++i)
  3155. for (ch = i * UINT_BITS, elem = acceptable[i], mask = 1;
  3156. elem;
  3157. mask <<= 1, elem >>= 1, ++ch)
  3158. if (BE (elem & 1, 0))
  3159. {
  3160. /* There must be exactly one destination which accepts
  3161. character ch. See group_nodes_into_DFAstates. */
  3162. for (j = 0; (dests_ch[j][i] & mask) == 0; ++j)
  3163. ;
  3164. /* j-th destination accepts the word character ch. */
  3165. trtable[ch] = dest_states[j];
  3166. trtable[ch + SBC_MAX] = dest_states_word[j];
  3167. }
  3168. }
  3169. /* new line */
  3170. if (bitset_contain (acceptable, NEWLINE_CHAR))
  3171. {
  3172. /* The current state accepts newline character. */
  3173. for (j = 0; j < ndests; ++j)
  3174. if (bitset_contain (dests_ch[j], NEWLINE_CHAR))
  3175. {
  3176. /* k-th destination accepts newline character. */
  3177. trtable[NEWLINE_CHAR] = dest_states_nl[j];
  3178. if (need_word_trtable)
  3179. trtable[NEWLINE_CHAR + SBC_MAX] = dest_states_nl[j];
  3180. /* There must be only one destination which accepts
  3181. newline. See group_nodes_into_DFAstates. */
  3182. break;
  3183. }
  3184. }
  3185. if (dest_states_malloced)
  3186. free (dest_states);
  3187. re_node_set_free (&follows);
  3188. for (i = 0; i < ndests; ++i)
  3189. re_node_set_free (dests_node + i);
  3190. if (dests_node_malloced)
  3191. free (dests_node);
  3192. return 1;
  3193. }
/* Group all nodes belonging to STATE into several destinations.
   Then for all destinations, set the nodes belonging to the destination
   to DESTS_NODE[i] and set the characters accepted by the destination
   to DEST_CH[i].  The groups partition the accepted characters: each
   accepted character ends up in exactly one DESTS_CH[i].
   This function return the number of destinations, or -1 on error.  */

static int
group_nodes_into_DFAstates (dfa, state, dests_node, dests_ch)
     re_dfa_t *dfa;
     const re_dfastate_t *state;
     re_node_set *dests_node;
     bitset *dests_ch;
{
  reg_errcode_t err;
  int result;
  int i, j, k;
  int ndests; /* Number of the destinations from `state'.  */
  bitset accepts; /* Characters a node can accept.  */
  const re_node_set *cur_nodes = &state->nodes;

  bitset_empty (accepts);
  ndests = 0;
  /* For all the nodes belonging to `state',  */
  for (i = 0; i < cur_nodes->nelem; ++i)
    {
      re_token_t *node = &dfa->nodes[cur_nodes->elems[i]];
      re_token_type_t type = node->type;
      unsigned int constraint = node->constraint;

      /* Enumerate all single byte character this node can accept.  */
      if (type == CHARACTER)
	bitset_set (accepts, node->opr.c);
      else if (type == SIMPLE_BRACKET)
	{
	  bitset_merge (accepts, node->opr.sbcset);
	}
      else if (type == OP_PERIOD)
	{
#ifdef RE_ENABLE_I18N
	  /* In multibyte locales '.' only accepts the single-byte
	     characters here; multibyte ones are handled elsewhere.  */
	  if (dfa->mb_cur_max > 1)
	    bitset_merge (accepts, dfa->sb_char);
	  else
#endif
	    bitset_set_all (accepts);
	  if (!(dfa->syntax & RE_DOT_NEWLINE))
	    bitset_clear (accepts, '\n');
	  if (dfa->syntax & RE_DOT_NOT_NULL)
	    bitset_clear (accepts, '\0');
	}
#ifdef RE_ENABLE_I18N
      else if (type == OP_UTF8_PERIOD)
	{
	  /* Set the lower half of the bitset: characters 0..127, the
	     single-byte range of UTF-8.  */
	  memset (accepts, 255, sizeof (unsigned int) * BITSET_UINTS / 2);
	  if (!(dfa->syntax & RE_DOT_NEWLINE))
	    bitset_clear (accepts, '\n');
	  if (dfa->syntax & RE_DOT_NOT_NULL)
	    bitset_clear (accepts, '\0');
	}
#endif
      else
	/* Epsilon and other non-accepting nodes contribute nothing.  */
	continue;

      /* Check the `accepts' bitset and sift out the characters which
	 do not match the node's context constraint.  */
      if (constraint)
	{
	  if (constraint & NEXT_NEWLINE_CONSTRAINT)
	    {
	      /* Only newline may follow: reduce ACCEPTS accordingly.  */
	      int accepts_newline = bitset_contain (accepts, NEWLINE_CHAR);
	      bitset_empty (accepts);
	      if (accepts_newline)
		bitset_set (accepts, NEWLINE_CHAR);
	      else
		continue;
	    }
	  if (constraint & NEXT_ENDBUF_CONSTRAINT)
	    {
	      /* No character at all may follow end-of-buffer.  */
	      bitset_empty (accepts);
	      continue;
	    }
	  if (constraint & NEXT_WORD_CONSTRAINT)
	    {
	      unsigned int any_set = 0;
	      if (type == CHARACTER && !node->word_char)
		{
		  bitset_empty (accepts);
		  continue;
		}
	      /* Keep only word characters (in multibyte locales also
		 everything outside the single-byte set).  */
#ifdef RE_ENABLE_I18N
	      if (dfa->mb_cur_max > 1)
		for (j = 0; j < BITSET_UINTS; ++j)
		  any_set |= (accepts[j] &= (dfa->word_char[j] | ~dfa->sb_char[j]));
	      else
#endif
		for (j = 0; j < BITSET_UINTS; ++j)
		  any_set |= (accepts[j] &= dfa->word_char[j]);
	      if (!any_set)
		continue;
	    }
	  if (constraint & NEXT_NOTWORD_CONSTRAINT)
	    {
	      unsigned int any_set = 0;
	      if (type == CHARACTER && node->word_char)
		{
		  bitset_empty (accepts);
		  continue;
		}
	      /* Keep only non-word characters.  */
#ifdef RE_ENABLE_I18N
	      if (dfa->mb_cur_max > 1)
		for (j = 0; j < BITSET_UINTS; ++j)
		  any_set |= (accepts[j] &= ~(dfa->word_char[j] & dfa->sb_char[j]));
	      else
#endif
		for (j = 0; j < BITSET_UINTS; ++j)
		  any_set |= (accepts[j] &= ~dfa->word_char[j]);
	      if (!any_set)
		continue;
	    }
	}

      /* Then divide `accepts' into DFA states, or create a new
	 state.  Above, we make sure that accepts is not empty.  */
      for (j = 0; j < ndests; ++j)
	{
	  bitset intersec; /* Intersection sets, see below.  */
	  bitset remains;
	  /* Flags, see below.  */
	  int has_intersec, not_subset, not_consumed;

	  /* Optimization, skip if this state doesn't accept the character.  */
	  if (type == CHARACTER && !bitset_contain (dests_ch[j], node->opr.c))
	    continue;

	  /* Enumerate the intersection set of this state and `accepts'.  */
	  has_intersec = 0;
	  for (k = 0; k < BITSET_UINTS; ++k)
	    has_intersec |= intersec[k] = accepts[k] & dests_ch[j][k];
	  /* And skip if the intersection set is empty.  */
	  if (!has_intersec)
	    continue;

	  /* Then check if this state is a subset of `accepts'.  */
	  not_subset = not_consumed = 0;
	  for (k = 0; k < BITSET_UINTS; ++k)
	    {
	      /* REMAINS: characters of group J not accepted by NODE.
		 ACCEPTS is simultaneously stripped of the overlap.  */
	      not_subset |= remains[k] = ~accepts[k] & dests_ch[j][k];
	      not_consumed |= accepts[k] = accepts[k] & ~dests_ch[j][k];
	    }

	  /* If this state isn't a subset of `accepts', create a
	     new group state, which has the `remains'.  */
	  if (not_subset)
	    {
	      bitset_copy (dests_ch[ndests], remains);
	      bitset_copy (dests_ch[j], intersec);
	      err = re_node_set_init_copy (dests_node + ndests, &dests_node[j]);
	      if (BE (err != REG_NOERROR, 0))
		goto error_return;
	      ++ndests;
	    }

	  /* Put the position in the current group.  */
	  result = re_node_set_insert (&dests_node[j], cur_nodes->elems[i]);
	  if (BE (result < 0, 0))
	    goto error_return;

	  /* If all characters are consumed, go to next node.  */
	  if (!not_consumed)
	    break;
	}
      /* Some characters remain, create a new group.  */
      if (j == ndests)
	{
	  bitset_copy (dests_ch[ndests], accepts);
	  err = re_node_set_init_1 (dests_node + ndests, cur_nodes->elems[i]);
	  if (BE (err != REG_NOERROR, 0))
	    goto error_return;
	  ++ndests;
	  bitset_empty (accepts);
	}
    }
  return ndests;
 error_return:
  for (j = 0; j < ndests; ++j)
    re_node_set_free (dests_node + j);
  return -1;
}
  3369. #ifdef RE_ENABLE_I18N
  3370. /* Check how many bytes the node `dfa->nodes[node_idx]' accepts.
  3371. Return the number of the bytes the node accepts.
  3372. STR_IDX is the current index of the input string.
  3373. This function handles the nodes which can accept one character, or
  3374. one collating element like '.', '[a-z]', opposite to the other nodes
  3375. can only accept one byte. */
  3376. static int
  3377. check_node_accept_bytes (dfa, node_idx, input, str_idx)
  3378. re_dfa_t *dfa;
  3379. int node_idx, str_idx;
  3380. const re_string_t *input;
  3381. {
  3382. const re_token_t *node = dfa->nodes + node_idx;
  3383. int char_len, elem_len;
  3384. int i;
  3385. if (BE (node->type == OP_UTF8_PERIOD, 0))
  3386. {
  3387. unsigned char c = re_string_byte_at (input, str_idx), d;
  3388. if (BE (c < 0xc2, 1))
  3389. return 0;
  3390. if (str_idx + 2 > input->len)
  3391. return 0;
  3392. d = re_string_byte_at (input, str_idx + 1);
  3393. if (c < 0xe0)
  3394. return (d < 0x80 || d > 0xbf) ? 0 : 2;
  3395. else if (c < 0xf0)
  3396. {
  3397. char_len = 3;
  3398. if (c == 0xe0 && d < 0xa0)
  3399. return 0;
  3400. }
  3401. else if (c < 0xf8)
  3402. {
  3403. char_len = 4;
  3404. if (c == 0xf0 && d < 0x90)
  3405. return 0;
  3406. }
  3407. else if (c < 0xfc)
  3408. {
  3409. char_len = 5;
  3410. if (c == 0xf8 && d < 0x88)
  3411. return 0;
  3412. }
  3413. else if (c < 0xfe)
  3414. {
  3415. char_len = 6;
  3416. if (c == 0xfc && d < 0x84)
  3417. return 0;
  3418. }
  3419. else
  3420. return 0;
  3421. if (str_idx + char_len > input->len)
  3422. return 0;
  3423. for (i = 1; i < char_len; ++i)
  3424. {
  3425. d = re_string_byte_at (input, str_idx + i);
  3426. if (d < 0x80 || d > 0xbf)
  3427. return 0;
  3428. }
  3429. return char_len;
  3430. }
  3431. char_len = re_string_char_size_at (input, str_idx);
  3432. if (node->type == OP_PERIOD)
  3433. {
  3434. if (char_len <= 1)
  3435. return 0;
  3436. /* FIXME: I don't think this if is needed, as both '\n'
  3437. and '\0' are char_len == 1. */
  3438. /* '.' accepts any one character except the following two cases. */
  3439. if ((!(dfa->syntax & RE_DOT_NEWLINE) &&
  3440. re_string_byte_at (input, str_idx) == '\n') ||
  3441. ((dfa->syntax & RE_DOT_NOT_NULL) &&
  3442. re_string_byte_at (input, str_idx) == '\0'))
  3443. return 0;
  3444. return char_len;
  3445. }
  3446. elem_len = re_string_elem_size_at (input, str_idx);
  3447. if ((elem_len <= 1 && char_len <= 1) || char_len == 0)
  3448. return 0;
  3449. if (node->type == COMPLEX_BRACKET)
  3450. {
  3451. const re_charset_t *cset = node->opr.mbcset;
  3452. # ifdef _LIBC
  3453. const unsigned char *pin
  3454. = ((const unsigned char *) re_string_get_buffer (input) + str_idx);
  3455. int j;
  3456. uint32_t nrules;
  3457. # endif /* _LIBC */
  3458. int match_len = 0;
  3459. wchar_t wc = ((cset->nranges || cset->nchar_classes || cset->nmbchars)
  3460. ? re_string_wchar_at (input, str_idx) : 0);
  3461. /* match with multibyte character? */
  3462. for (i = 0; i < cset->nmbchars; ++i)
  3463. if (wc == cset->mbchars[i])
  3464. {
  3465. match_len = char_len;
  3466. goto check_node_accept_bytes_match;
  3467. }
  3468. /* match with character_class? */
  3469. for (i = 0; i < cset->nchar_classes; ++i)
  3470. {
  3471. wctype_t wt = cset->char_classes[i];
  3472. if (__iswctype (wc, wt))
  3473. {
  3474. match_len = char_len;
  3475. goto check_node_accept_bytes_match;
  3476. }
  3477. }
  3478. # ifdef _LIBC
  3479. nrules = _NL_CURRENT_WORD (LC_COLLATE, _NL_COLLATE_NRULES);
  3480. if (nrules != 0)
  3481. {
  3482. unsigned int in_collseq = 0;
  3483. const int32_t *table, *indirect;
  3484. const unsigned char *weights, *extra;
  3485. const char *collseqwc;
  3486. int32_t idx;
  3487. /* This #include defines a local function! */
  3488. # include <locale/weight.h>
  3489. /* match with collating_symbol? */
  3490. if (cset->ncoll_syms)
  3491. extra = (const unsigned char *)
  3492. _NL_CURRENT (LC_COLLATE, _NL_COLLATE_SYMB_EXTRAMB);
  3493. for (i = 0; i < cset->ncoll_syms; ++i)
  3494. {
  3495. const unsigned char *coll_sym = extra + cset->coll_syms[i];
  3496. /* Compare the length of input collating element and
  3497. the length of current collating element. */
  3498. if (*coll_sym != elem_len)
  3499. continue;
  3500. /* Compare each bytes. */
  3501. for (j = 0; j < *coll_sym; j++)
  3502. if (pin[j] != coll_sym[1 + j])
  3503. break;
  3504. if (j == *coll_sym)
  3505. {
  3506. /* Match if every bytes is equal. */
  3507. match_len = j;
  3508. goto check_node_accept_bytes_match;
  3509. }
  3510. }
  3511. if (cset->nranges)
  3512. {
  3513. if (elem_len <= char_len)
  3514. {
  3515. collseqwc = _NL_CURRENT (LC_COLLATE, _NL_COLLATE_COLLSEQWC);
  3516. in_collseq = __collseq_table_lookup (collseqwc, wc);
  3517. }
  3518. else
  3519. in_collseq = find_collation_sequence_value (pin, elem_len);
  3520. }
  3521. /* match with range expression? */
  3522. for (i = 0; i < cset->nranges; ++i)
  3523. if (cset->range_starts[i] <= in_collseq
  3524. && in_collseq <= cset->range_ends[i])
  3525. {
  3526. match_len = elem_len;
  3527. goto check_node_accept_bytes_match;
  3528. }
  3529. /* match with equivalence_class? */
  3530. if (cset->nequiv_classes)
  3531. {
  3532. const unsigned char *cp = pin;
  3533. table = (const int32_t *)
  3534. _NL_CURRENT (LC_COLLATE, _NL_COLLATE_TABLEMB);
  3535. weights = (const unsigned char *)
  3536. _NL_CURRENT (LC_COLLATE, _NL_COLLATE_WEIGHTMB);
  3537. extra = (const unsigned char *)
  3538. _NL_CURRENT (LC_COLLATE, _NL_COLLATE_EXTRAMB);
  3539. indirect = (const int32_t *)
  3540. _NL_CURRENT (LC_COLLATE, _NL_COLLATE_INDIRECTMB);
  3541. idx = findidx (&cp);
  3542. if (idx > 0)
  3543. for (i = 0; i < cset->nequiv_classes; ++i)
  3544. {
  3545. int32_t equiv_class_idx = cset->equiv_classes[i];
  3546. size_t weight_len = weights[idx];
  3547. if (weight_len == weights[equiv_class_idx])
  3548. {
  3549. int cnt = 0;
  3550. while (cnt <= weight_len
  3551. && (weights[equiv_class_idx + 1 + cnt]
  3552. == weights[idx + 1 + cnt]))
  3553. ++cnt;
  3554. if (cnt > weight_len)
  3555. {
  3556. match_len = elem_len;
  3557. goto check_node_accept_bytes_match;
  3558. }
  3559. }
  3560. }
  3561. }
  3562. }
  3563. else
  3564. # endif /* _LIBC */
  3565. {
  3566. /* match with range expression? */
  3567. #if __GNUC__ >= 2
  3568. wchar_t cmp_buf[] = {L'\0', L'\0', wc, L'\0', L'\0', L'\0'};
  3569. #else
  3570. wchar_t cmp_buf[] = {L'\0', L'\0', L'\0', L'\0', L'\0', L'\0'};
  3571. cmp_buf[2] = wc;
  3572. #endif
  3573. for (i = 0; i < cset->nranges; ++i)
  3574. {
  3575. cmp_buf[0] = cset->range_starts[i];
  3576. cmp_buf[4] = cset->range_ends[i];
  3577. if (wcscoll (cmp_buf, cmp_buf + 2) <= 0
  3578. && wcscoll (cmp_buf + 2, cmp_buf + 4) <= 0)
  3579. {
  3580. match_len = char_len;
  3581. goto check_node_accept_bytes_match;
  3582. }
  3583. }
  3584. }
  3585. check_node_accept_bytes_match:
  3586. if (!cset->non_match)
  3587. return match_len;
  3588. else
  3589. {
  3590. if (match_len > 0)
  3591. return 0;
  3592. else
  3593. return (elem_len > char_len) ? elem_len : char_len;
  3594. }
  3595. }
  3596. return 0;
  3597. }
  3598. # ifdef _LIBC
/* Look up the collation sequence value of the collating element made of
   the MBS_LEN bytes at MBS, using the LC_COLLATE data of the current
   locale.  Returns UINT_MAX when the element cannot be found.  */
static unsigned int
find_collation_sequence_value (mbs, mbs_len)
    const unsigned char *mbs;
    size_t mbs_len;
{
  uint32_t nrules = _NL_CURRENT_WORD (LC_COLLATE, _NL_COLLATE_NRULES);
  if (nrules == 0)
    {
      if (mbs_len == 1)
	{
	  /* No valid character.  Match it as a single byte character.  */
	  const unsigned char *collseq = (const unsigned char *)
	    _NL_CURRENT (LC_COLLATE, _NL_COLLATE_COLLSEQMB);
	  return collseq[mbs[0]];
	}
      return UINT_MAX;
    }
  else
    {
      int32_t idx;
      const unsigned char *extra = (const unsigned char *)
	_NL_CURRENT (LC_COLLATE, _NL_COLLATE_SYMB_EXTRAMB);
      /* The next _NL_CURRENT item marks the end of the extra table, so the
	 pointer difference is the table size in bytes.  */
      int32_t extrasize = (const unsigned char *)
	_NL_CURRENT (LC_COLLATE, _NL_COLLATE_SYMB_EXTRAMB + 1) - extra;
      /* Walk the variable-length records of the table.  Judging from the
	 skips performed below, each record holds: a length-prefixed name,
	 a length-prefixed multibyte sequence, padding to a 4-byte
	 boundary, a uint32_t collation value, and a length-prefixed wide
	 character sequence -- NOTE(review): confirm against the locale
	 file format documentation.  */
      for (idx = 0; idx < extrasize;)
	{
	  int mbs_cnt, found = 0;
	  int32_t elem_mbs_len;
	  /* Skip the name of collating element name.  */
	  idx = idx + extra[idx] + 1;
	  elem_mbs_len = extra[idx++];
	  if (mbs_len == elem_mbs_len)
	    {
	      /* Lengths agree; compare the byte sequences.  */
	      for (mbs_cnt = 0; mbs_cnt < elem_mbs_len; ++mbs_cnt)
		if (extra[idx + mbs_cnt] != mbs[mbs_cnt])
		  break;
	      if (mbs_cnt == elem_mbs_len)
		/* Found the entry.  */
		found = 1;
	    }
	  /* Skip the byte sequence of the collating element.  */
	  idx += elem_mbs_len;
	  /* Adjust for the alignment.  */
	  idx = (idx + 3) & ~3;
	  /* Skip the collation sequence value.  */
	  idx += sizeof (uint32_t);
	  /* Skip the wide char sequence of the collating element.  */
	  idx = idx + sizeof (uint32_t) * (extra[idx] + 1);
	  /* If we found the entry, return the sequence value.  */
	  if (found)
	    return *(uint32_t *) (extra + idx);
	  /* Skip the collation sequence value.  */
	  idx += sizeof (uint32_t);
	}
      return UINT_MAX;
    }
}
  3656. # endif /* _LIBC */
  3657. #endif /* RE_ENABLE_I18N */
  3658. /* Check whether the node accepts the byte which is IDX-th
  3659. byte of the INPUT. */
  3660. static int
  3661. check_node_accept (mctx, node, idx)
  3662. const re_match_context_t *mctx;
  3663. const re_token_t *node;
  3664. int idx;
  3665. {
  3666. unsigned char ch;
  3667. ch = re_string_byte_at (&mctx->input, idx);
  3668. switch (node->type)
  3669. {
  3670. case CHARACTER:
  3671. if (node->opr.c != ch)
  3672. return 0;
  3673. break;
  3674. case SIMPLE_BRACKET:
  3675. if (!bitset_contain (node->opr.sbcset, ch))
  3676. return 0;
  3677. break;
  3678. #ifdef RE_ENABLE_I18N
  3679. case OP_UTF8_PERIOD:
  3680. if (ch >= 0x80)
  3681. return 0;
  3682. /* FALLTHROUGH */
  3683. #endif
  3684. case OP_PERIOD:
  3685. if ((ch == '\n' && !(mctx->dfa->syntax & RE_DOT_NEWLINE))
  3686. || (ch == '\0' && (mctx->dfa->syntax & RE_DOT_NOT_NULL)))
  3687. return 0;
  3688. break;
  3689. default:
  3690. return 0;
  3691. }
  3692. if (node->constraint)
  3693. {
  3694. /* The node has constraints. Check whether the current context
  3695. satisfies the constraints. */
  3696. unsigned int context = re_string_context_at (&mctx->input, idx,
  3697. mctx->eflags);
  3698. if (NOT_SATISFY_NEXT_CONSTRAINT (node->constraint, context))
  3699. return 0;
  3700. }
  3701. return 1;
  3702. }
  3703. /* Extend the buffers, if the buffers have run out. */
  3704. static reg_errcode_t
  3705. extend_buffers (mctx)
  3706. re_match_context_t *mctx;
  3707. {
  3708. reg_errcode_t ret;
  3709. re_string_t *pstr = &mctx->input;
  3710. /* Double the lengthes of the buffers. */
  3711. ret = re_string_realloc_buffers (pstr, pstr->bufs_len * 2);
  3712. if (BE (ret != REG_NOERROR, 0))
  3713. return ret;
  3714. if (mctx->state_log != NULL)
  3715. {
  3716. /* And double the length of state_log. */
  3717. /* XXX We have no indication of the size of this buffer. If this
  3718. allocation fail we have no indication that the state_log array
  3719. does not have the right size. */
  3720. re_dfastate_t **new_array = re_realloc (mctx->state_log, re_dfastate_t *,
  3721. pstr->bufs_len + 1);
  3722. if (BE (new_array == NULL, 0))
  3723. return REG_ESPACE;
  3724. mctx->state_log = new_array;
  3725. }
  3726. /* Then reconstruct the buffers. */
  3727. if (pstr->icase)
  3728. {
  3729. #ifdef RE_ENABLE_I18N
  3730. if (pstr->mb_cur_max > 1)
  3731. {
  3732. ret = build_wcs_upper_buffer (pstr);
  3733. if (BE (ret != REG_NOERROR, 0))
  3734. return ret;
  3735. }
  3736. else
  3737. #endif /* RE_ENABLE_I18N */
  3738. build_upper_buffer (pstr);
  3739. }
  3740. else
  3741. {
  3742. #ifdef RE_ENABLE_I18N
  3743. if (pstr->mb_cur_max > 1)
  3744. build_wcs_buffer (pstr);
  3745. else
  3746. #endif /* RE_ENABLE_I18N */
  3747. {
  3748. if (pstr->trans != NULL)
  3749. re_string_translate_buffer (pstr);
  3750. }
  3751. }
  3752. return REG_NOERROR;
  3753. }
  3754. /* Functions for matching context. */
  3755. /* Initialize MCTX. */
  3756. static reg_errcode_t
  3757. match_ctx_init (mctx, eflags, n)
  3758. re_match_context_t *mctx;
  3759. int eflags, n;
  3760. {
  3761. mctx->eflags = eflags;
  3762. mctx->match_last = -1;
  3763. if (n > 0)
  3764. {
  3765. mctx->bkref_ents = re_malloc (struct re_backref_cache_entry, n);
  3766. mctx->sub_tops = re_malloc (re_sub_match_top_t *, n);
  3767. if (BE (mctx->bkref_ents == NULL || mctx->sub_tops == NULL, 0))
  3768. return REG_ESPACE;
  3769. }
  3770. /* Already zero-ed by the caller.
  3771. else
  3772. mctx->bkref_ents = NULL;
  3773. mctx->nbkref_ents = 0;
  3774. mctx->nsub_tops = 0; */
  3775. mctx->abkref_ents = n;
  3776. mctx->max_mb_elem_len = 1;
  3777. mctx->asub_tops = n;
  3778. return REG_NOERROR;
  3779. }
  3780. /* Clean the entries which depend on the current input in MCTX.
  3781. This function must be invoked when the matcher changes the start index
  3782. of the input, or changes the input string. */
  3783. static void
  3784. match_ctx_clean (mctx)
  3785. re_match_context_t *mctx;
  3786. {
  3787. int st_idx;
  3788. for (st_idx = 0; st_idx < mctx->nsub_tops; ++st_idx)
  3789. {
  3790. int sl_idx;
  3791. re_sub_match_top_t *top = mctx->sub_tops[st_idx];
  3792. for (sl_idx = 0; sl_idx < top->nlasts; ++sl_idx)
  3793. {
  3794. re_sub_match_last_t *last = top->lasts[sl_idx];
  3795. re_free (last->path.array);
  3796. re_free (last);
  3797. }
  3798. re_free (top->lasts);
  3799. if (top->path)
  3800. {
  3801. re_free (top->path->array);
  3802. re_free (top->path);
  3803. }
  3804. free (top);
  3805. }
  3806. mctx->nsub_tops = 0;
  3807. mctx->nbkref_ents = 0;
  3808. }
  3809. /* Free all the memory associated with MCTX. */
  3810. static void
  3811. match_ctx_free (mctx)
  3812. re_match_context_t *mctx;
  3813. {
  3814. /* First, free all the memory associated with MCTX->SUB_TOPS. */
  3815. match_ctx_clean (mctx);
  3816. re_free (mctx->sub_tops);
  3817. re_free (mctx->bkref_ents);
  3818. }
  3819. /* Add a new backreference entry to MCTX.
  3820. Note that we assume that caller never call this function with duplicate
  3821. entry, and call with STR_IDX which isn't smaller than any existing entry.
  3822. */
  3823. static reg_errcode_t
  3824. match_ctx_add_entry (mctx, node, str_idx, from, to)
  3825. re_match_context_t *mctx;
  3826. int node, str_idx, from, to;
  3827. {
  3828. if (mctx->nbkref_ents >= mctx->abkref_ents)
  3829. {
  3830. struct re_backref_cache_entry* new_entry;
  3831. new_entry = re_realloc (mctx->bkref_ents, struct re_backref_cache_entry,
  3832. mctx->abkref_ents * 2);
  3833. if (BE (new_entry == NULL, 0))
  3834. {
  3835. re_free (mctx->bkref_ents);
  3836. return REG_ESPACE;
  3837. }
  3838. mctx->bkref_ents = new_entry;
  3839. memset (mctx->bkref_ents + mctx->nbkref_ents, '\0',
  3840. sizeof (struct re_backref_cache_entry) * mctx->abkref_ents);
  3841. mctx->abkref_ents *= 2;
  3842. }
  3843. if (mctx->nbkref_ents > 0
  3844. && mctx->bkref_ents[mctx->nbkref_ents - 1].str_idx == str_idx)
  3845. mctx->bkref_ents[mctx->nbkref_ents - 1].more = 1;
  3846. mctx->bkref_ents[mctx->nbkref_ents].node = node;
  3847. mctx->bkref_ents[mctx->nbkref_ents].str_idx = str_idx;
  3848. mctx->bkref_ents[mctx->nbkref_ents].subexp_from = from;
  3849. mctx->bkref_ents[mctx->nbkref_ents].subexp_to = to;
  3850. /* This is a cache that saves negative results of check_dst_limits_calc_pos.
  3851. If bit N is clear, means that this entry won't epsilon-transition to
  3852. an OP_OPEN_SUBEXP or OP_CLOSE_SUBEXP for the N+1-th subexpression. If
  3853. it is set, check_dst_limits_calc_pos_1 will recurse and try to find one
  3854. such node.
  3855. A backreference does not epsilon-transition unless it is empty, so set
  3856. to all zeros if FROM != TO. */
  3857. mctx->bkref_ents[mctx->nbkref_ents].eps_reachable_subexps_map
  3858. = (from == to ? ~0 : 0);
  3859. mctx->bkref_ents[mctx->nbkref_ents++].more = 0;
  3860. if (mctx->max_mb_elem_len < to - from)
  3861. mctx->max_mb_elem_len = to - from;
  3862. return REG_NOERROR;
  3863. }
  3864. /* Search for the first entry which has the same str_idx, or -1 if none is
  3865. found. Note that MCTX->BKREF_ENTS is already sorted by MCTX->STR_IDX. */
  3866. static int
  3867. search_cur_bkref_entry (mctx, str_idx)
  3868. re_match_context_t *mctx;
  3869. int str_idx;
  3870. {
  3871. int left, right, mid, last;
  3872. last = right = mctx->nbkref_ents;
  3873. for (left = 0; left < right;)
  3874. {
  3875. mid = (left + right) / 2;
  3876. if (mctx->bkref_ents[mid].str_idx < str_idx)
  3877. left = mid + 1;
  3878. else
  3879. right = mid;
  3880. }
  3881. if (left < last && mctx->bkref_ents[left].str_idx == str_idx)
  3882. return left;
  3883. else
  3884. return -1;
  3885. }
  3886. /* Register the node NODE, whose type is OP_OPEN_SUBEXP, and which matches
  3887. at STR_IDX. */
  3888. static reg_errcode_t
  3889. match_ctx_add_subtop (mctx, node, str_idx)
  3890. re_match_context_t *mctx;
  3891. int node, str_idx;
  3892. {
  3893. #ifdef DEBUG
  3894. assert (mctx->sub_tops != NULL);
  3895. assert (mctx->asub_tops > 0);
  3896. #endif
  3897. if (BE (mctx->nsub_tops == mctx->asub_tops, 0))
  3898. {
  3899. int new_asub_tops = mctx->asub_tops * 2;
  3900. re_sub_match_top_t **new_array = re_realloc (mctx->sub_tops,
  3901. re_sub_match_top_t *,
  3902. new_asub_tops);
  3903. if (BE (new_array == NULL, 0))
  3904. return REG_ESPACE;
  3905. mctx->sub_tops = new_array;
  3906. mctx->asub_tops = new_asub_tops;
  3907. }
  3908. mctx->sub_tops[mctx->nsub_tops] = calloc (1, sizeof (re_sub_match_top_t));
  3909. if (BE (mctx->sub_tops[mctx->nsub_tops] == NULL, 0))
  3910. return REG_ESPACE;
  3911. mctx->sub_tops[mctx->nsub_tops]->node = node;
  3912. mctx->sub_tops[mctx->nsub_tops++]->str_idx = str_idx;
  3913. return REG_NOERROR;
  3914. }
  3915. /* Register the node NODE, whose type is OP_CLOSE_SUBEXP, and which matches
  3916. at STR_IDX, whose corresponding OP_OPEN_SUBEXP is SUB_TOP. */
  3917. static re_sub_match_last_t *
  3918. match_ctx_add_sublast (subtop, node, str_idx)
  3919. re_sub_match_top_t *subtop;
  3920. int node, str_idx;
  3921. {
  3922. re_sub_match_last_t *new_entry;
  3923. if (BE (subtop->nlasts == subtop->alasts, 0))
  3924. {
  3925. int new_alasts = 2 * subtop->alasts + 1;
  3926. re_sub_match_last_t **new_array = re_realloc (subtop->lasts,
  3927. re_sub_match_last_t *,
  3928. new_alasts);
  3929. if (BE (new_array == NULL, 0))
  3930. return NULL;
  3931. subtop->lasts = new_array;
  3932. subtop->alasts = new_alasts;
  3933. }
  3934. new_entry = calloc (1, sizeof (re_sub_match_last_t));
  3935. if (BE (new_entry != NULL, 1))
  3936. {
  3937. subtop->lasts[subtop->nlasts] = new_entry;
  3938. new_entry->node = node;
  3939. new_entry->str_idx = str_idx;
  3940. ++subtop->nlasts;
  3941. }
  3942. return new_entry;
  3943. }
  3944. static void
  3945. sift_ctx_init (sctx, sifted_sts, limited_sts, last_node, last_str_idx)
  3946. re_sift_context_t *sctx;
  3947. re_dfastate_t **sifted_sts, **limited_sts;
  3948. int last_node, last_str_idx;
  3949. {
  3950. sctx->sifted_states = sifted_sts;
  3951. sctx->limited_states = limited_sts;
  3952. sctx->last_node = last_node;
  3953. sctx->last_str_idx = last_str_idx;
  3954. re_node_set_init_empty (&sctx->limits);
  3955. }