/*
 * /source/otp_src_R14B02/erts/emulator/hipe/hipe_gc.c
 * Origin: https://github.com/cparedes/omnibus — C, 555 lines (466 code, 46 blank, 43 comment).
 */
  1. /*
  2. * %CopyrightBegin%
  3. *
  4. * Copyright Ericsson AB 2004-2011. All Rights Reserved.
  5. *
  6. * The contents of this file are subject to the Erlang Public License,
  7. * Version 1.1, (the "License"); you may not use this file except in
  8. * compliance with the License. You should have received a copy of the
  9. * Erlang Public License along with this software. If not, it can be
  10. * retrieved online at http://www.erlang.org/.
  11. *
  12. * Software distributed under the License is distributed on an "AS IS"
  13. * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
  14. * the License for the specific language governing rights and limitations
  15. * under the License.
  16. *
  17. * %CopyrightEnd%
  18. */
  19. /* $Id$
  20. * GC support procedures
  21. */
  22. #ifdef HAVE_CONFIG_H
  23. #include "config.h"
  24. #endif
  25. #include "global.h"
  26. #include "erl_gc.h"
  27. #include "hipe_stack.h"
  28. #include "hipe_gc.h"
/*
 * fullsweep_nstack() - scan the process' HiPE native stack during a
 * fullsweep (major) collection, evacuating every live root found there.
 *
 * p:      process whose native stack is walked
 * n_htop: current allocation top of the target heap
 * Returns the updated n_htop after any terms were moved.
 *
 * Frames are decoded via per-return-address stack descriptors
 * (struct sdesc); each descriptor's 'livebits' bitmap marks which
 * frame slots hold live Erlang terms.
 */
Eterm *fullsweep_nstack(Process *p, Eterm *n_htop)
{
    /* known nstack walk state */
    Eterm *nsp;
    Eterm *nsp_end;
    const struct sdesc *sdesc;
    unsigned int sdesc_size;
    unsigned long ra;
    unsigned int i;
    unsigned int mask;
    /* arch-specific nstack walk state */
    struct nstack_walk_state walk_state;
    /* fullsweep-specific state */
    char *src, *oh;
    Uint src_size, oh_size;

    if (!nstack_walk_init_check(p))
	return n_htop;		/* nothing to scan */

    nsp = nstack_walk_nsp_begin(p);
    nsp_end = p->hipe.nstgraylim;
    if (nsp_end)
	/* a stack trap is armed at the gray limit; remove it before
	   walking through that frame */
	nstack_walk_kill_trap(p, nsp_end);
    nsp_end = nstack_walk_nsp_end(p);

    sdesc = nstack_walk_init_sdesc(p, &walk_state);

    /* only terms pointing into the young heap [src, src+src_size) or
       the old heap [oh, oh+oh_size) are moved; others stay in place */
    src = (char*)HEAP_START(p);
    src_size = (char*)HEAP_TOP(p) - src;
    oh = (char*)OLD_HEAP(p);
    oh_size = (char*)OLD_HTOP(p) - oh;

    for (;;) {
	if (nstack_walk_nsp_reached_end(nsp, nsp_end)) {
	    if (nsp == nsp_end) {
		if (nsp) {
		    /* see the HIGH_WATER update in fullsweep_heap() */
		    p->hipe.nstblacklim = nsp; /* nsp == nsp_end */
		    nstack_walk_update_trap(p, walk_state.sdesc0);
		}
		return n_htop;
	    }
	    /* walk overshot the stack end: fatal, fall through to abort() */
	    fprintf(stderr, "%s: passed end of stack\r\n", __FUNCTION__);
	    break;
	}
	sdesc_size = nstack_walk_frame_size(sdesc);
	i = 0;
	mask = sdesc->livebits[0];
	for (;;) {
	    if (mask & 1) {	/* slot i holds a live term */
		Eterm *nsp_i = nstack_walk_frame_index(nsp, i);
		Eterm gval = *nsp_i;
		if (is_boxed(gval)) {
		    Eterm *ptr = boxed_val(gval);
		    Eterm val = *ptr;
		    if (IS_MOVED_BOXED(val)) {
			/* already evacuated: val is the forwarding pointer */
			ASSERT(is_boxed(val));
			*nsp_i = val;
		    } else if (in_area(ptr, src, src_size) ||
			       in_area(ptr, oh, oh_size)) {
			MOVE_BOXED(ptr, val, n_htop, nsp_i);
		    }
		} else if (is_list(gval)) {
		    Eterm *ptr = list_val(gval);
		    Eterm val = *ptr;
		    if (IS_MOVED_CONS(val)) {
			/* forwarded cons cell: new address is in the cdr slot */
			*nsp_i = ptr[1];
		    } else if (in_area(ptr, src, src_size) ||
			       in_area(ptr, oh, oh_size)) {
			ASSERT(within(ptr, p));
			MOVE_CONS(ptr, val, n_htop, nsp_i);
		    }
		}
	    }
	    if (++i >= sdesc_size)
		break;
	    /* advance the livebits cursor; refill 'mask' every 32 slots */
	    if (i & 31)
		mask >>= 1;
	    else
		mask = sdesc->livebits[i >> 5];
	}
	/* the frame's return address selects the caller's descriptor */
	ra = nstack_walk_frame_ra(nsp, sdesc);
	sdesc = hipe_find_sdesc(ra);
	nsp = nstack_walk_next_frame(nsp, sdesc_size);
    }
    abort();
}
/*
 * gensweep_nstack() - scan the process' HiPE native stack during a
 * generational (minor) collection.
 *
 * Live terms below the high-water mark (mature area) are moved to the
 * old heap via *ptr_old_htop; younger live terms go to the new young
 * heap via *ptr_n_htop.  Both tops are read at entry and written back
 * when the walk completes successfully.
 *
 * The gray/black limits bound how much of the native stack is scanned:
 * when a gray limit exists, the walk stops at the black limit instead
 * of the true stack end, and the stack trap at the gray limit is
 * killed before the walk and re-armed afterwards.
 */
void gensweep_nstack(Process *p, Eterm **ptr_old_htop, Eterm **ptr_n_htop)
{
    /* known nstack walk state */
    Eterm *nsp;
    Eterm *nsp_end;
    const struct sdesc *sdesc;
    unsigned int sdesc_size;
    unsigned long ra;
    unsigned int i;
    unsigned int mask;
    /* arch-specific nstack walk state */
    struct nstack_walk_state walk_state;
    /* gensweep-specific state */
    Eterm *old_htop, *n_htop;
    char *heap;
    Uint heap_size, mature_size;

    if (!nstack_walk_init_check(p))
	return;			/* nothing to scan; tops untouched */

    nsp = nstack_walk_nsp_begin(p);
    nsp_end = p->hipe.nstgraylim;
    if (nsp_end) {
	/* if gray limit passed black limit, reset black limit */
	if (nstack_walk_gray_passed_black(nsp_end, p->hipe.nstblacklim))
	    p->hipe.nstblacklim = nsp_end;
	nstack_walk_kill_trap(p, nsp_end);
	nsp_end = p->hipe.nstblacklim;
    } else
	nsp_end = nstack_walk_nsp_end(p);

    sdesc = nstack_walk_init_sdesc(p, &walk_state);
    old_htop = *ptr_old_htop;
    n_htop = *ptr_n_htop;

    /* [heap, heap+mature_size) is the mature area (promoted to old heap);
       [heap, heap+heap_size) is the whole young heap */
    heap = (char*)HEAP_START(p);
    heap_size = (char*)HEAP_TOP(p) - heap;
    mature_size = (char*)HIGH_WATER(p) - heap;

    for (;;) {
	if (nstack_walk_nsp_reached_end(nsp, nsp_end)) {
	    if (nsp == nsp_end) {
		/* walk complete: publish the updated heap tops */
		*ptr_old_htop = old_htop;
		*ptr_n_htop = n_htop;
		if (nsp) {
		    /* see the HIGH_WATER update in gen_gc() */
		    if (HEAP_START(p) != HIGH_WATER(p)) {
			p->hipe.nstblacklim =
			    p->hipe.nstgraylim
			    ? p->hipe.nstgraylim
			    : nsp; /* nsp == nsp_end */
		    } else {
			/* blacklim = graylim ? blacklim : end */
			if (!p->hipe.nstgraylim)
			    p->hipe.nstblacklim = nsp; /* nsp == nsp_end */
		    }
		    nstack_walk_update_trap(p, walk_state.sdesc0);
		}
		return;
	    }
	    /* walk overshot the stack end: fatal, fall through to abort() */
	    fprintf(stderr, "%s: passed end of stack\r\n", __FUNCTION__);
	    break;
	}
	sdesc_size = nstack_walk_frame_size(sdesc);
	i = 0;
	mask = sdesc->livebits[0];
	for (;;) {
	    if (mask & 1) {	/* slot i holds a live term */
		Eterm *nsp_i = nstack_walk_frame_index(nsp, i);
		Eterm gval = *nsp_i;
		if (is_boxed(gval)) {
		    Eterm *ptr = boxed_val(gval);
		    Eterm val = *ptr;
		    if (IS_MOVED_BOXED(val)) {
			/* already evacuated: val is the forwarding pointer */
			ASSERT(is_boxed(val));
			*nsp_i = val;
		    } else if (in_area(ptr, heap, mature_size)) {
			/* mature term: promote to the old heap */
			MOVE_BOXED(ptr, val, old_htop, nsp_i);
		    } else if (in_area(ptr, heap, heap_size)) {
			/* young term: copy to the new young heap */
			ASSERT(within(ptr, p));
			MOVE_BOXED(ptr, val, n_htop, nsp_i);
		    }
		} else if (is_list(gval)) {
		    Eterm *ptr = list_val(gval);
		    Eterm val = *ptr;
		    if (IS_MOVED_CONS(val)) {
			/* forwarded cons cell: new address is in the cdr slot */
			*nsp_i = ptr[1];
		    } else if (in_area(ptr, heap, mature_size)) {
			MOVE_CONS(ptr, val, old_htop, nsp_i);
		    } else if (in_area(ptr, heap, heap_size)) {
			ASSERT(within(ptr, p));
			MOVE_CONS(ptr, val, n_htop, nsp_i);
		    }
		}
	    }
	    if (++i >= sdesc_size)
		break;
	    /* advance the livebits cursor; refill 'mask' every 32 slots */
	    if (i & 31)
		mask >>= 1;
	    else
		mask = sdesc->livebits[i >> 5];
	}
	/* the frame's return address selects the caller's descriptor */
	ra = nstack_walk_frame_ra(nsp, sdesc);
	sdesc = hipe_find_sdesc(ra);
	nsp = nstack_walk_next_frame(nsp, sdesc_size);
    }
    abort();
}
  214. #ifdef HYBRID
  215. #ifdef INCREMENTAL
  216. Eterm *ma_fullsweep_nstack(Process *p, Eterm *n_htop, Eterm *n_hend)
  217. {
  218. /* known nstack walk state */
  219. Eterm *nsp;
  220. Eterm *nsp_end;
  221. const struct sdesc *sdesc;
  222. unsigned int sdesc_size;
  223. unsigned long ra;
  224. unsigned int i;
  225. unsigned int mask;
  226. /* arch-specific nstack walk state */
  227. struct nstack_walk_state walk_state;
  228. if (!nstack_walk_init_check(p))
  229. return n_htop;
  230. nsp = nstack_walk_nsp_begin(p);
  231. nsp_end = nstack_walk_nsp_end(p);
  232. sdesc = nstack_walk_init_sdesc(p, &walk_state);
  233. for (;;) {
  234. if (nstack_walk_nsp_reached_end(nsp, nsp_end)) {
  235. if (nsp == nsp_end)
  236. return n_htop;
  237. fprintf(stderr, "%s: passed end of stack\r\n", __FUNCTION__);
  238. break;
  239. }
  240. sdesc_size = nstack_walk_frame_size(sdesc);
  241. i = 0;
  242. mask = sdesc->livebits[0];
  243. for (;;) {
  244. if (mask & 1) {
  245. Eterm *nsp_i = nstack_walk_frame_index(nsp, i);
  246. Eterm val = *nsp_i;
  247. Eterm *obj_ptr = ptr_val(val);
  248. switch (primary_tag(val)) {
  249. case TAG_PRIMARY_LIST:
  250. COPYMARK_CONS(obj_ptr, n_htop, nsp_i, n_hend);
  251. break;
  252. case TAG_PRIMARY_BOXED:
  253. COPYMARK_BOXED(obj_ptr, n_htop, nsp_i, n_hend);
  254. break;
  255. default:
  256. break;
  257. }
  258. }
  259. if (++i >= sdesc_size)
  260. break;
  261. if (i & 31)
  262. mask >>= 1;
  263. else
  264. mask = sdesc->livebits[i >> 5];
  265. }
  266. ra = nstack_walk_frame_ra(nsp, sdesc);
  267. if (ra == (unsigned long)nbif_stack_trap_ra)
  268. ra = (unsigned long)p->hipe.ngra;
  269. sdesc = hipe_find_sdesc(ra);
  270. nsp = nstack_walk_next_frame(nsp, sdesc_size);
  271. }
  272. abort();
  273. }
  274. void ma_gensweep_nstack(Process *p, Eterm **ptr_old_htop, Eterm **ptr_n_htop)
  275. {
  276. /* known nstack walk state */
  277. Eterm *nsp;
  278. Eterm *nsp_end;
  279. const struct sdesc *sdesc;
  280. unsigned int sdesc_size;
  281. unsigned long ra;
  282. unsigned int i;
  283. unsigned int mask;
  284. /* arch-specific nstack walk state */
  285. struct nstack_walk_state walk_state;
  286. /* ma_gensweep-specific state */
  287. Eterm *low_water, *high_water, *surface;
  288. Eterm *n_htop;
  289. Eterm *old_htop;
  290. if (!nstack_walk_init_check(p))
  291. return;
  292. nsp = nstack_walk_nsp_begin(p);
  293. nsp_end = nstack_walk_nsp_end(p);
  294. low_water = global_heap;
  295. //high_water = global_high_water;
  296. surface = global_htop;
  297. old_htop = *ptr_old_htop;
  298. n_htop = *ptr_n_htop;
  299. sdesc = nstack_walk_init_sdesc(p, &walk_state);
  300. for (;;) {
  301. if (nstack_walk_nsp_reached_end(nsp, nsp_end)) {
  302. if (nsp == nsp_end) {
  303. *ptr_old_htop = old_htop;
  304. *ptr_n_htop = n_htop;
  305. return;
  306. }
  307. fprintf(stderr, "%s: passed end of stack\r\n", __FUNCTION__);
  308. break;
  309. }
  310. sdesc_size = nstack_walk_frame_size(sdesc);
  311. i = 0;
  312. mask = sdesc->livebits[0];
  313. for (;;) {
  314. if (mask & 1) {
  315. Eterm *nsp_i = nstack_walk_frame_index(nsp, i);
  316. Eterm gval = *nsp_i;
  317. if (is_boxed(gval)) {
  318. Eterm *ptr = boxed_val(gval);
  319. Eterm val = *ptr;
  320. if (MY_IS_MOVED(val)) {
  321. *nsp_i = val;
  322. } else if (ptr_within(ptr, low_water, high_water)) {
  323. MOVE_BOXED(ptr, val, old_htop, nsp_i);
  324. } else if (ptr_within(ptr, high_water, surface)) {
  325. MOVE_BOXED(ptr, val, n_htop, nsp_i);
  326. }
  327. } else if (is_list(gval)) {
  328. Eterm *ptr = list_val(gval);
  329. Eterm val = *ptr;
  330. if (is_non_value(val)) {
  331. *nsp_i = ptr[1];
  332. } else if (ptr_within(ptr, low_water, high_water)) {
  333. MOVE_CONS(ptr, val, old_htop, nsp_i);
  334. } else if (ptr_within(ptr, high_water, surface)) {
  335. MOVE_CONS(ptr, val, n_htop, nsp_i);
  336. }
  337. }
  338. }
  339. if (++i >= sdesc_size)
  340. break;
  341. if (i & 31)
  342. mask >>= 1;
  343. else
  344. mask = sdesc->livebits[i >> 5];
  345. }
  346. ra = nstack_walk_frame_ra(nsp, sdesc);
  347. if (ra == (unsigned long)nbif_stack_trap_ra)
  348. ra = (unsigned long)p->hipe.ngra;
  349. sdesc = hipe_find_sdesc(ra);
  350. nsp = nstack_walk_next_frame(nsp, sdesc_size);
  351. }
  352. abort();
  353. }
  354. #else /* not INCREMENTAL */
  355. Eterm *ma_fullsweep_nstack(Process *p, Eterm *n_htop)
  356. {
  357. /* known nstack walk state */
  358. Eterm *nsp;
  359. Eterm *nsp_end;
  360. const struct sdesc *sdesc;
  361. unsigned int sdesc_size;
  362. unsigned long ra;
  363. unsigned int i;
  364. unsigned int mask;
  365. /* arch-specific nstack walk state */
  366. struct nstack_walk_state walk_state;
  367. /* ma_fullsweep-specific state */
  368. Eterm *gheap = global_heap;
  369. Eterm *ghtop = global_htop;
  370. Eterm *goheap = global_old_heap;
  371. Eterm *gohtop = global_old_htop;
  372. if (!nstack_walk_init_check(p))
  373. return n_htop;
  374. nsp = nstack_walk_nsp_begin(p);
  375. nsp_end = nstack_walk_nsp_end(p);
  376. sdesc = nstack_walk_init_sdesc(p, &walk_state);
  377. for (;;) {
  378. if (nstack_walk_nsp_reached_end(nsp, nsp_end)) {
  379. if (nsp == nsp_end)
  380. return n_htop;
  381. fprintf(stderr, "%s: passed end of stack\r\n", __FUNCTION__);
  382. break;
  383. }
  384. sdesc_size = nstack_walk_frame_size(sdesc);
  385. i = 0;
  386. mask = sdesc->livebits[0];
  387. for (;;) {
  388. if (mask & 1) {
  389. Eterm *nsp_i = nstack_walk_frame_index(nsp, i);
  390. Eterm gval = *nsp_i;
  391. if (is_boxed(gval)) {
  392. Eterm *ptr = boxed_val(gval);
  393. Eterm val = *ptr;
  394. if (MY_IS_MOVED(val)) {
  395. *nsp_i = val;
  396. } else if (ptr_within(ptr, gheap, ghtop)) {
  397. MOVE_BOXED(ptr, val, n_htop, nsp_i);
  398. } else if (ptr_within(ptr, goheap, gohtop)) {
  399. MOVE_BOXED(ptr, val, n_htop, nsp_i);
  400. }
  401. } else if (is_list(gval)) {
  402. Eterm *ptr = list_val(gval);
  403. Eterm val = *ptr;
  404. if (is_non_value(val)) {
  405. *nsp_i = ptr[1];
  406. } else if (ptr_within(ptr, gheap, ghtop)) {
  407. MOVE_CONS(ptr, val, n_htop, nsp_i);
  408. } else if (ptr_within(ptr, gheap, ghtop)) {
  409. MOVE_CONS(ptr, val, n_htop, nsp_i);
  410. }
  411. }
  412. }
  413. if (++i >= sdesc_size)
  414. break;
  415. if (i & 31)
  416. mask >>= 1;
  417. else
  418. mask = sdesc->livebits[i >> 5];
  419. }
  420. ra = nstack_walk_frame_ra(nsp, sdesc);
  421. if (ra == (unsigned long)nbif_stack_trap_ra)
  422. ra = (unsigned long)p->hipe.ngra;
  423. sdesc = hipe_find_sdesc(ra);
  424. nsp = nstack_walk_next_frame(nsp, sdesc_size);
  425. }
  426. abort();
  427. }
  428. void ma_gensweep_nstack(Process *p, Eterm **ptr_old_htop, Eterm **ptr_n_htop)
  429. {
  430. /* known nstack walk state */
  431. Eterm *nsp;
  432. Eterm *nsp_end;
  433. const struct sdesc *sdesc;
  434. unsigned int sdesc_size;
  435. unsigned long ra;
  436. unsigned int i;
  437. unsigned int mask;
  438. /* arch-specific nstack walk state */
  439. struct nstack_walk_state walk_state;
  440. /* ma_gensweep-specific state */
  441. Eterm *low_water, *high_water, *surface;
  442. Eterm *n_htop;
  443. Eterm *old_htop;
  444. if (!nstack_walk_init_check(p))
  445. return;
  446. nsp = nstack_walk_nsp_begin(p);
  447. nsp_end = nstack_walk_nsp_end(p);
  448. low_water = global_heap;
  449. high_water = global_high_water;
  450. surface = global_htop;
  451. old_htop = *ptr_old_htop;
  452. n_htop = *ptr_n_htop;
  453. sdesc = nstack_walk_init_sdesc(p, &walk_state);
  454. for (;;) {
  455. if (nstack_walk_nsp_reached_end(nsp, nsp_end)) {
  456. if (nsp == nsp_end) {
  457. *ptr_old_htop = old_htop;
  458. *ptr_n_htop = n_htop;
  459. return;
  460. }
  461. fprintf(stderr, "%s: passed end of stack\r\n", __FUNCTION__);
  462. break;
  463. }
  464. sdesc_size = nstack_walk_frame_size(sdesc);
  465. i = 0;
  466. mask = sdesc->livebits[0];
  467. for (;;) {
  468. if (mask & 1) {
  469. Eterm *nsp_i = nstack_walk_frame_index(nsp, i);
  470. Eterm gval = *nsp_i;
  471. if (is_boxed(gval)) {
  472. Eterm *ptr = boxed_val(gval);
  473. Eterm val = *ptr;
  474. if (MY_IS_MOVED(val)) {
  475. *nsp_i = val;
  476. } else if (ptr_within(ptr, low_water, high_water)) {
  477. MOVE_BOXED(ptr, val, old_htop, nsp_i);
  478. } else if (ptr_within(ptr, high_water, surface)) {
  479. MOVE_BOXED(ptr, val, n_htop, nsp_i);
  480. }
  481. } else if (is_list(gval)) {
  482. Eterm *ptr = list_val(gval);
  483. Eterm val = *ptr;
  484. if (is_non_value(val)) {
  485. *nsp_i = ptr[1];
  486. } else if (ptr_within(ptr, low_water, high_water)) {
  487. MOVE_CONS(ptr, val, old_htop, nsp_i);
  488. } else if (ptr_within(ptr, high_water, surface)) {
  489. MOVE_CONS(ptr, val, n_htop, nsp_i);
  490. }
  491. }
  492. }
  493. if (++i >= sdesc_size)
  494. break;
  495. if (i & 31)
  496. mask >>= 1;
  497. else
  498. mask = sdesc->livebits[i >> 5];
  499. }
  500. ra = nstack_walk_frame_ra(nsp, sdesc);
  501. if (ra == (unsigned long)nbif_stack_trap_ra)
  502. ra = (unsigned long)p->hipe.ngra;
  503. sdesc = hipe_find_sdesc(ra);
  504. nsp = nstack_walk_next_frame(nsp, sdesc_size);
  505. }
  506. abort();
  507. }
  508. #endif /* INCREMENTAL */
  509. #endif /* HYBRID */