PageRenderTime 36ms CodeModel.GetById 28ms RepoModel.GetById 0ms app.codeStats 0ms

/src/spvw_garcol.d

https://github.com/ynd/clisp-branch--ynd-devel
D | 2539 lines | 2015 code | 58 blank | 466 comment | 421 complexity | a359c07cd23040c499f64212b36cd870 MD5 | raw file
Possible License(s): LGPL-2.0, GPL-2.0, BSD-3-Clause
  1. /* Garbage collector. */
  2. /* --------------------------- Specification --------------------------- */
  3. /* Execute a simple garbage collection.
  4. can trigger GC */
  5. local maygc void gar_col_simple (void);
  6. /* Execute a full garbage collection.
  7. > level: if 1, also drop all jitc code
  8. can trigger GC */
  9. global maygc void gar_col (int level);
  10. #ifdef SPVW_PAGES
  11. /* Supplement a simple garbage collection with a compacting.
  12. can trigger GC */
  13. local maygc void gar_col_compact (void);
  14. #endif
  15. #if defined(SPVW_MIXED_BLOCKS_OPPOSITE) && RESERVE
  16. /* Move the conses, to make a little more room. */
  17. local void move_conses (sintM delta);
  18. #endif
  19. /* --------------------------- Implementation -------------------------- */
  20. /* overall strategy:
  21. 1. pseudo-recursive marking by setting of garcol_bit.
  22. 2. relocate objects of fixed length (conses and similar),
  23. calculation of displacement of objects of variable length.
  24. 3. update of pointers.
  25. 4. perform the displacements of objects of variable length.
  26. */
  27. #include "spvw_genera1.c"
  28. /* marking-subroutine
  29. procedure: marking routine without stack usage (i.e.
  30. non-"recursive") by descent into the structure to be marked
  31. with pointer-modification (pointers are reversed,
  32. so that they can serve as "ariadne thread")
  33. Convention: an object X counts as marked, if
  34. - an object of variable length: bit garcol_bit,(X) is set
  35. - a two-pointer-object: bit garcol_bit,(X) is set
  36. - a SUBR/FSUBR: bit garcol_bit,(X+const_offset) is set
  37. - Character, Short-Float, Fixnum etc.: always. */
  38. #if DEBUG_GC_MARK
  39. #define IF_DEBUG_GC_MARK(statement) statement
  40. #if defined(WIDE_SOFT) || defined(WIDE_AUXI)
  41. /* oint is defined as uint64. */
  42. #define PRIoint "ll"
  43. #else
  44. /* oint is defined as uintP. Assume pointer_bitsize == long_bitsize. */
  45. #define PRIoint "l"
  46. #endif
  47. #else
  48. #define IF_DEBUG_GC_MARK(statement) /*nop*/
  49. #endif
  50. #define MARK(obj) mark(obj)
  51. #include "spvw_gcmark.c"
  52. #undef MARK
  53. /* pack a pointer into an object, without typeinfo.
  54. pointer_as_object(ptr): void* --> object
  55. pointer_was_object(obj): object --> void* */
  56. #ifdef TYPECODES
  57. #define pointer_as_object(ptr) type_pointer_object(0,ptr)
  58. #define pointer_was_object(obj) type_pointable(0,obj)
  59. #else
  60. #if defined(WIDE_AUXI)
  61. #define pointer_as_object(ptr) as_object_with_auxi((aint)(ptr))
  62. #define pointer_was_object(obj) ((void*)((obj).one_o))
  63. #else
  64. #define pointer_as_object(ptr) as_object((oint)(ptr))
  65. #define pointer_was_object(obj) ((void*)as_oint(obj))
  66. #endif
  67. #endif
  68. /* marking phase:
  69. All "active" structures are marked.
  70. everything is active, that is reachable
  71. - from the LISP-stack or
  72. - at Generational-GC: from the old generation or
  73. - as program-constant (the list of all packages belongs to this). */
/* Mark all objects reachable from one LISP STACK.
   > objptr: pointer into the STACK; the walk ends at the nullobj
     sentinel that terminates every STACK.
   Frame markers are skipped (they carry no heap pointers themselves);
   every normal slot is handed to gc_mark(). */
local void gc_mark_stack (gcv_object_t* objptr)
{
  while (!eq(*objptr,nullobj)) { /* until STACK is finished: */
    IF_DEBUG_GC_MARK(fprintf(stderr,"gc_mark_stack: 0x%lx/%lu (%lu)\n",
                             objptr,objptr,as_oint(*objptr)));
    if (as_oint(*objptr) & wbit(frame_bit_o)) { /* does a frame start here? */
      if ((as_oint(*objptr) & wbit(skip2_bit_o)) == 0) /* without skip2-bit? */
        objptr skipSTACKop 2; /* yes -> advance by 2 */
      else
        objptr skipSTACKop 1; /* no -> advance by 1 */
    } else { /* normal object, mark: */
      var object obj = *objptr;
      #ifndef NO_symbolflags
      /* Stack slots may carry symbol flag bits in the type field;
         strip them so gc_mark sees a clean symbol reference. */
      switch (typecode(obj)) { /* possibly remove symbol flags */
        case_symbolflagged:
          obj = symbol_without_flags(obj);
        default: break;
      }
      #endif
      gc_mark(obj);
      objptr skipSTACKop 1; /* advance */
    }
  }
}
  98. #include "spvw_genera2.c"
/* Marking phase: mark everything that is "live", i.e. reachable
   - from a LISP stack, or
   - with generational GC: from the old generation, or
   - as a program constant (the list of all packages belongs to this). */
local void gc_markphase (void)
{
  /* Mark all the STACKs */
  for_all_STACKs(gc_mark_stack(objptr));
  #ifdef GENERATIONAL_GC
  /* mark the old generation, scanning it only sparingly: */
  if (generation > 0) { gc_mark_old_generation(); }
  #endif
  /* mark all program constants: */
  #if !defined(GENERATIONAL_GC)
  for_all_subrs(gc_mark(subr_tab_ptr_as_object(ptr));); /* subr_tab */
  for_all_constsyms(gc_mark(symbol_tab_ptr_as_object(ptr));); /* symbol_tab */
  #else
  /* Because of the macro in_old_generation(), gc_mark() may regard all
     constant symbols and all subrs as belonging to the old generation and
     may not walk through their pointers recursively. So do it by hand. */
  for_all_subrs({ /* traverse subr_tab: mark each Subr's components */
    gc_mark(ptr->name);
    gc_mark(ptr->keywords);
  });
  for_all_constsyms({ /* traverse symbol_tab: mark each symbol's slots */
    gc_mark(ptr->symvalue);
    gc_mark(ptr->symfunction);
    gc_mark(ptr->hashcode);
    gc_mark(ptr->proplist);
    gc_mark(ptr->pname);
    gc_mark(ptr->homepackage);
  });
  #endif
  for_all_constobjs( gc_mark(*objptr); ); /* object_tab */
  for_all_threadobjs( gc_mark(*objptr); ); /* threads */
  /* The callers in back_trace are mostly already marked:
     they refer to subrs and closures that are currently being
     called and therefore cannot possibly be garbage-collected.
     But a few remain unmarked, so make sure all are really marked: */
  for_all_back_traces({
    for (; bt != NULL; bt = bt->bt_next)
      gc_mark(bt->bt_function);
  });
}
  139. /* UP: Determine, if an object is still "live".
  140. I.e. if the mark bit is set after the marking phase. */
  141. local bool alive (object obj)
  142. {
  143. #ifdef TYPECODES
  144. switch (typecode(obj)) { /* according to type */
  145. case_pair: /* Cons */
  146. if (in_old_generation(obj,typecode(obj),1)) return true;
  147. if (marked(ThePointer(obj))) return true; else return false;
  148. case_symbol: /* Symbol */
  149. case_array: /* Array */
  150. case_bignum: /* Bignum */
  151. #ifndef IMMEDIATE_FFLOAT
  152. case_ffloat: /* Single-Float */
  153. #endif
  154. case_dfloat: /* Double-Float */
  155. case_lfloat: /* Long-Float */
  156. case_record: /* Record */
  157. if (in_old_generation(obj,typecode(obj),0)) return true;
  158. if (marked(ThePointer(obj))) return true; else return false;
  159. case_subr: /* Subr */
  160. if (marked(TheSubr(obj))) return true; else return false;
  161. case_machine: /* Machine Pointer */
  162. case_char: /* Character */
  163. case_system: /* Frame-pointer, Small-Read-label, system */
  164. case_fixnum: /* Fixnum */
  165. case_sfloat: /* Short-Float */
  166. #ifdef IMMEDIATE_FFLOAT
  167. case_ffloat: /* Single-Float */
  168. #endif
  169. return true;
  170. default:
  171. /* these are no objects. */
  172. /*NOTREACHED*/ abort();
  173. }
  174. #else
  175. switch (as_oint(obj) & nonimmediate_heapcode_mask) {
  176. case varobject_bias+varobjects_misaligned:
  177. if (in_old_generation(obj,,0)) return true;
  178. if (marked(ThePointer(obj))) return true; else return false;
  179. case cons_bias+conses_misaligned:
  180. #ifdef STANDARD_HEAPCODES
  181. /* NB: (immediate_bias & nonimmediate_heapcode_mask) == cons_bias. */
  182. if (immediate_object_p(obj)) return true;
  183. #endif
  184. if (in_old_generation(obj,,1)) return true;
  185. if (marked(ThePointer(obj))) return true; else return false;
  186. #ifdef STANDARD_HEAPCODES
  187. case subr_bias:
  188. if (marked(TheSubr(obj))) return true; else return false;
  189. #endif
  190. default:
  191. return true;
  192. }
  193. #endif
  194. }
  195. #include "spvw_weak.c"
/* Unmark the SUBRs and the fixed (table-allocated) symbols,
   so the sweep/update phases see them in a defined state. */
local void unmark_fixed_varobjects (void)
{
  /* Even if defined(GENERATIONAL_GC), because the macro in_old_generation()
     has undefined behaviour for constsyms and subrs, therefore we don't know
     a priori whether the constsyms and subrs have their mark bit set. */
  for_all_subrs( unmark(&((Subr)ptr)->GCself); ); /* unmark each Subr */
  for_all_constsyms( unmark(&((Symbol)ptr)->GCself); ); /* unmark each Symbol in symbol_tab */
}
  205. #if !defined(MORRIS_GC)
  206. #ifdef SPVW_MIXED_BLOCKS_OPPOSITE
/* Concentrate the CONS cells between page->page_start and page->page_end
   at the top end of the page: */
local void gc_compact_cons_page (Page* page)
{
  /* The pointer p1 sweeps upward and the pointer p2 sweeps downward
     through the memory region, until they collide.
     Live (marked) cells from below are moved into unmarked slots above;
     the vacated cell receives a forwarding pointer and is re-marked so the
     later update phase can recognize and follow it. */
  var aint p1 = page->page_start; /* lower bound */
  var aint p2 = page->page_end; /* upper bound */
 sweeploop:
  /* search the next-higher unmarked cell <p2 and unmark all: */
 sweeploop1:
  if (p1==p2) goto sweepok2; /* bounds are equal -> finished */
  p2 -= sizeof(cons_); /* capture next cell from above */
  if (marked(p2)) { /* marked? */
    unmark(p2); /* unmark */
    goto sweeploop1;
  }
  /* p1 <= p2, p2 points to an unmarked cell.
     search next lower marked cell >=p1: */
 sweeploop2:
  if (p1==p2) goto sweepok1; /* bounds are equal -> finished */
  if (!marked(p1)) { /* unmarked? */
    p1 += sizeof(cons_); /* move to the next higher cell */
    goto sweeploop2; /* continue search */
  }
  /* p1 < p2, p1 points to a marked cell. */
  unmark(p1); /* unmark */
  /* copy the content of the cell into the unmarked cell: */
  ((gcv_object_t*)p2)[0] = ((gcv_object_t*)p1)[0];
  ((gcv_object_t*)p2)[1] = ((gcv_object_t*)p1)[1];
  *(gcv_object_t*)p1 = pointer_as_object(p2); /* leave the new address behind */
  mark(p1); /* and mark (as identification for the update phase) */
  p1 += sizeof(cons_); /* this cell is finished. */
  goto sweeploop; /* continue */
 sweepok1: p1 += sizeof(cons_); /* skip the last unmarked cons */
 sweepok2:
  /* p1 = new lower bound of the cons region */
  page->page_start = p1;
}
  247. #else
/* Concentrate the CONS cells between page->page_start and page->page_end
   at the bottom end of the page: */
local void gc_compact_cons_page (Page* page)
{
  /* The pointer p1 sweeps upward and the pointer p2 sweeps downward
     through the memory region, until they collide.
     Live (marked) cells from above are moved into unmarked slots below;
     the vacated cell receives a forwarding pointer and is re-marked so the
     later update phase can recognize and follow it. */
  var aint p1 = page->page_start; /* lower bound */
  var aint p2 = page->page_end; /* upper bound */
 sweeploop:
  /* search the next-higher marked cell <p2: */
 sweeploop1:
  if (p1==p2) goto sweepok2; /* bounds are equal -> finished */
  p2 -= sizeof(cons_); /* capture next cell from above */
  if (!marked(p2)) goto sweeploop1; /* unmarked? */
  /* p1 <= p2, p2 points to a marked cell. */
  unmark(p2); /* unmark */
  /* search the next-lower unmarked cell >=p1 and unmark all: */
 sweeploop2:
  if (p1==p2) goto sweepok1; /* bounds are equal -> finished */
  if (marked(p1)) { /* marked? */
    unmark(p1); /* unmark */
    p1 += sizeof(cons_); /* move to the next higher cell */
    goto sweeploop2; /* continue search */
  }
  /* p1 < p2, p1 points to an unmarked cell.
     copy the cell content from the marked into the unmarked cell: */
  ((gcv_object_t*)p1)[0] = ((gcv_object_t*)p2)[0];
  ((gcv_object_t*)p1)[1] = ((gcv_object_t*)p2)[1];
  *(gcv_object_t*)p2 = pointer_as_object(p1); /* leave the new address behind */
  mark(p2); /* and mark (as identification for the update phase) */
  p1 += sizeof(cons_); /* this cell is finished. */
  goto sweeploop; /* continue */
 sweepok1: p1 += sizeof(cons_); /* skip the last marked cons */
 sweepok2:
  /* p1 = new upper bound of the cons region */
  page->page_end = p1;
}
  286. #endif
  287. #else /* defined(MORRIS_GC) */
  288. /* Algorithm see:
  289. [F. Lockwood Morris: A time- and space-efficient garbage collection algorithm.
  290. CACM 21,8 (August 1978), 662-665.]
  291. Delete all unmarked CONS-cells and unmark the marked CONS-cells,
  292. so that the mark bit is available for the reverse spointers. */
  293. local void gc_morris1 (Page* page)
  294. {
  295. var aint p1 = page->page_start; /* lower bound */
  296. var aint p2 = page->page_end; /* upper bound */
  297. var aint d = 0; /* also count free memory */
  298. while (p1 != p2) {
  299. if (!marked(p1)) {
  300. ((gcv_object_t*)p1)[0] = nullobj;
  301. ((gcv_object_t*)p1)[1] = nullobj;
  302. d += sizeof(cons_);
  303. } else {
  304. unmark(p1);
  305. #ifdef DEBUG_SPVW
  306. if (eq(((gcv_object_t*)p1)[0],nullobj) || eq(((gcv_object_t*)p1)[1],nullobj))
  307. abort();
  308. #endif
  309. }
  310. p1 += sizeof(cons_); /* this cell is finished. */
  311. }
  312. page->page_gcpriv.d = d; /* store free memory */
  313. }
  314. #ifdef SPVW_MIXED_BLOCKS_OPPOSITE
/* There is only one page with Two-Pointer-Objects. */
local void gc_morris2 (Page* page)
{
  /* Each cell within a Cons now contains a list of all
     addresses of pointers to this cell, that point to this cell
     from a root or from a Varobject.
     Traverse the undeleted conses from left to right:
     (in between, each cell contains a list of all addresses
     of pointers to this cell, that point to this cell from a root,
     from a varobject or from a cons lying further to the left.) */
  var aint p1 = page->page_start; /* lower bound */
  var aint p2 = p1 + page->page_gcpriv.d; /* later lower bound */
  var aint p1limit = page->page_end; /* upper bound */
  while (p1 != p1limit) { /* always: p1 <= p2 <= p1limit */
    /* both cells of a cons are treated exactly the same. */
    var object obj = *(gcv_object_t*)p1;
    if (!eq(obj,nullobj)) {
      /* p1 is moved to p2. */
      #ifdef TYPECODES
      /* the pointers to this cell registered so far are updated:
         the mark bit flags a reversed pointer; follow the chain,
         rewriting each referer to the cell's future address p2. */
      while ((as_oint(obj) & wbit(garcol_bit_o)) != 0) { /* process list */
        obj = without_mark_bit(obj);
        var aint p = upointer(obj);
        var object next_obj = *(gcv_object_t*)p;
        *(gcv_object_t*)p = type_pointer_object(typecode(obj),p2);
        obj = next_obj;
      }
      { /* if the cell contains a pointer "to the right", it is reversed. */
        var tint type = typecode(obj);
        switch (type) {
          case_pair: {
            var aint p = upointer(obj);
            if (!in_old_generation(obj,type,1) && (p > p1)) {
              /* For later update, insert
                 p1 in the list of pointers to p: */
              *(gcv_object_t*)p1 = *(gcv_object_t*)p;
              *(gcv_object_t*)p = with_mark_bit(type_pointer_object(type,p1));
              break;
            }
          }
          /* note: deliberate fallthrough when the cons pointer is not
             reversed (old generation, or pointing left) */
          default:
            *(gcv_object_t*)p1 = obj;
        }
      }
      #else /* no TYPECODES */
      /* the pointers to this cell registered so far are updated: */
      while ((as_oint(obj) & wbit(garcol_bit_o)) != 0) { /* process list */
        obj = without_mark_bit(obj);
        var aint p = (aint)ThePointer(obj);
        var object next_obj = *(gcv_object_t*)p;
        *(gcv_object_t*)p = as_object((as_oint(obj) & nonimmediate_bias_mask) | (oint)p2);
        obj = next_obj;
      }
      /* if the cell contains a pointer "to the right", it is reversed. */
      if (consp(obj)) {
        var aint p = (aint)ThePointer(obj);
        if (!in_old_generation(obj,,1) && (p > p1)) {
          /* For later update, insert
             p1 in the list of pointers to p: */
          *(gcv_object_t*)p1 = *(gcv_object_t*)p;
          *(gcv_object_t*)p = with_mark_bit(as_object((as_oint(obj) & nonimmediate_bias_mask) | (oint)p1));
        } else {
          *(gcv_object_t*)p1 = obj;
        }
      } else {
        *(gcv_object_t*)p1 = obj;
      }
      #endif
      p2 += sizeof(gcv_object_t);
    }
    p1 += sizeof(gcv_object_t);
  }
  if (p2!=p1limit)
    abort();
}
local void gc_morris3 (Page* page)
{
  /* Each cell within a cons now contains again the original content.
     Traverse the undeleted conses from right to left
     and compact them on the right:
     (in between, each cell contains a list of all addresses
     of pointers to this cell, that point to this cell
     from a cons lying further to the right.) */
  var aint p1limit = page->page_start; /* lower bound */
  var aint p1 = page->page_end; /* upper bound */
  var aint p2 = p1; /* upper bound */
  #ifdef DEBUG_SPVW
  /* consistency check: within every cons either both or neither cell
     must have been wiped to nullobj by gc_morris1. */
  while (p1!=p1limit) {
    p1 -= 2*sizeof(gcv_object_t);
    if (eq(*(gcv_object_t*)p1,nullobj)+eq(*(gcv_object_t*)(p1^sizeof(gcv_object_t)),nullobj)==1)
      abort();
  }
  p1 = page->page_end;
  #endif
  while (p1!=p1limit) { /* always: p1limit <= p1 <= p2 */
    /* both cells of a cons are treated exactly the same. */
    p1 -= sizeof(gcv_object_t);
    #ifdef DEBUG_SPVW
    if (eq(*(gcv_object_t*)p1,nullobj)+eq(*(gcv_object_t*)(p1^sizeof(gcv_object_t)),nullobj)==1)
      abort();
    if (((p1 % (2*sizeof(gcv_object_t))) != 0)
        && ((p2 % (2*sizeof(gcv_object_t))) != 0))
      abort();
    #endif
    var object obj = *(gcv_object_t*)p1;
    if (!eq(obj,nullobj)) {
      p2 -= sizeof(gcv_object_t);
      /* p1 is moved to p2. */
      #ifdef TYPECODES
      /* The newly registered pointers to this cell are updated:
         follow the mark-bit chain and rewrite each referer to p2. */
      while ((as_oint(obj) & wbit(garcol_bit_o)) != 0) { /* process list */
        obj = without_mark_bit(obj);
        var aint p = upointer(obj);
        var object next_obj = *(gcv_object_t*)p;
        *(gcv_object_t*)p = type_pointer_object(typecode(obj),p2);
        obj = next_obj;
      }
      #ifdef DEBUG_SPVW
      if (eq(obj,nullobj)) abort();
      #endif
      *(gcv_object_t*)p2 = obj;
      {
        var tint type = typecode(obj);
        if (!gcinvariant_type_p(type)) /* un-movable -> do nothing */
          switch (type) {
            case_pair: { /* Two-Pointer-Object */
              var aint p = upointer(obj);
              if (p < p1) { /* pointer to the left? */
                /* For later update, insert
                   p2 into the list of pointers to p: */
                #ifdef DEBUG_SPVW
                if (eq(*(gcv_object_t*)p,nullobj)) abort();
                #endif
                *(gcv_object_t*)p2 = *(gcv_object_t*)p;
                *(gcv_object_t*)p = with_mark_bit(type_pointer_object(type,p2));
              } else if (p == p1) { /* pointer to itself? */
                *(gcv_object_t*)p2 = type_pointer_object(type,p2);
              }
            }
              break;
            default: /* object of variable length */
              if (marked(ThePointer(obj))) /* marked? -> follow forwarding */
                *(gcv_object_t*)p2 = type_untype_object(type,untype(*(gcv_object_t*)ThePointer(obj)));
              break;
          }
      }
      #else /* no TYPECODES */
      /* The newly registered pointers to this cell are updated: */
      while ((as_oint(obj) & wbit(garcol_bit_o)) != 0) { /* process list */
        obj = without_mark_bit(obj);
        var aint p = (aint)ThePointer(obj);
        var object next_obj = *(gcv_object_t*)p;
        *(gcv_object_t*)p = as_object((as_oint(obj) & nonimmediate_bias_mask) | (oint)p2);
        obj = next_obj;
      }
      #ifdef DEBUG_SPVW
      if (eq(obj,nullobj)) abort();
      #endif
      *(gcv_object_t*)p2 = obj;
      if (!gcinvariant_object_p(obj)) { /* un-movable -> do nothing */
        if (consp(obj)) {
          /* Two-Pointer-Object */
          var aint p = (aint)ThePointer(obj);
          if (p < p1) { /* pointer to the left? */
            /* for later update, insert
               p2 into the list of pointers to p: */
            #ifdef DEBUG_SPVW
            if (eq(*(gcv_object_t*)p,nullobj)) abort();
            #endif
            *(gcv_object_t*)p2 = *(gcv_object_t*)p;
            *(gcv_object_t*)p = with_mark_bit(as_object((as_oint(obj) & nonimmediate_bias_mask) | (oint)p2));
          } else if (p == p1) { /* pointer to itself? */
            *(gcv_object_t*)p2 = as_object((as_oint(obj) & nonimmediate_bias_mask) | (oint)p2);
          }
        } else {
          /* object of variable length */
          if (marked(ThePointer(obj))) /* marked? -> follow forwarding */
            *(gcv_object_t*)p2 = as_object((as_oint(obj) & nonimmediate_bias_mask) | (as_oint(*(gcv_object_t*)ThePointer(obj)) & ~wbit(garcol_bit_o) & ~(oint)nonimmediate_bias_mask));
        }
      }
      #endif
    }
  }
  /* p2 = new lower bound of the cons region */
  if (p2 != page->page_start + page->page_gcpriv.d)
    abort();
  page->page_start = p2;
}
  503. #elif defined(SPVW_MIXED_BLOCKS_STAGGERED)
local void gc_morris2 (Page* page)
{
  /* Each cell within a Cons now contains a list of all
     addresses of pointers to this cell, that point to this cell
     from a root or from a Varobject.
     Traverse the undeleted conses from right to left:
     (in between, each cell contains a list of all addresses
     of pointers to this cell, that point to this cell from a root,
     from a varobject or from a cons lying further to the right.) */
  var aint p1 = page->page_end; /* upper bound */
  var aint p2 = p1 - page->page_gcpriv.d; /* later upper bound */
  var aint p1limit = page->page_start; /* lower bound */
  #ifdef DEBUG_SPVW
  /* consistency check: within every cons either both or neither cell
     must have been wiped to nullobj by gc_morris1. */
  while (p1!=p1limit) {
    p1 -= 2*sizeof(gcv_object_t);
    if (eq(*(gcv_object_t*)p1,nullobj)+eq(*(gcv_object_t*)(p1^sizeof(gcv_object_t)),nullobj)==1)
      abort();
  }
  p1 = page->page_end;
  #endif
  while (p1!=p1limit) { /* always: p1limit <= p2 <= p1 */
    /* both cells of a cons are treated exactly the same. */
    p1 -= sizeof(gcv_object_t);
    #ifdef DEBUG_SPVW
    if (eq(*(gcv_object_t*)p1,nullobj)+eq(*(gcv_object_t*)(p1^sizeof(gcv_object_t)),nullobj)==1)
      abort();
    #endif
    var object obj = *(gcv_object_t*)p1;
    if (!eq(obj,nullobj)) {
      p2 -= sizeof(gcv_object_t);
      /* p1 is moved to p2. */
      #ifdef TYPECODES
      /* the pointers to this cell registered so far are updated:
         follow the mark-bit chain and rewrite each referer to p2. */
      while ((as_oint(obj) & wbit(garcol_bit_o)) != 0) { /* process list */
        obj = without_mark_bit(obj);
        var aint p = upointer(obj);
        var object next_obj = *(gcv_object_t*)p;
        *(gcv_object_t*)p = type_pointer_object(typecode(obj),p2);
        obj = next_obj;
      }
      /* obj = original content of the cell p1. */
      #ifdef DEBUG_SPVW
      if (eq(obj,nullobj)) abort();
      #endif
      /* if the cell contains a pointer "to the left", it is reversed. */
      {
        var tint type = typecode(obj);
        switch (type) {
          case_pair: {
            var aint p = upointer(obj);
            if (!in_old_generation(obj,type,1) && (p < p1)) {
              /* For later update, insert
                 p1 into the list of pointers to p: */
              *(gcv_object_t*)p1 = *(gcv_object_t*)p;
              *(gcv_object_t*)p = with_mark_bit(type_pointer_object(type,p1));
              break;
            }
          }
          /* note: deliberate fallthrough when the cons pointer is not
             reversed (old generation, or pointing right) */
          default:
            *(gcv_object_t*)p1 = obj;
        }
      }
      #else
      /* the pointers to this cell registered so far are updated: */
      while ((as_oint(obj) & wbit(garcol_bit_o)) != 0) { /* process list */
        obj = without_mark_bit(obj);
        var aint p = (aint)ThePointer(obj);
        var object next_obj = *(gcv_object_t*)p;
        *(gcv_object_t*)p = as_object((as_oint(obj) & nonimmediate_bias_mask) | (oint)p2);
        obj = next_obj;
      }
      /* obj = original content of the cell p1. */
      #ifdef DEBUG_SPVW
      if (eq(obj,nullobj)) abort();
      #endif
      /* if the cell contains a pointer "to the left", it is reversed. */
      if (consp(obj)) {
        var aint p = (aint)ThePointer(obj);
        if (!in_old_generation(obj,,1) && (p < p1)) {
          /* For later update, insert
             p1 into the list of pointers to p: */
          *(gcv_object_t*)p1 = *(gcv_object_t*)p;
          *(gcv_object_t*)p = with_mark_bit(as_object((as_oint(obj) & nonimmediate_bias_mask) | (oint)p1));
        } else {
          *(gcv_object_t*)p1 = obj;
        }
      } else {
        *(gcv_object_t*)p1 = obj;
      }
      #endif
    }
  }
  if (p2!=p1limit)
    abort();
}
local void gc_morris3 (Page* page)
{
  /* Each cell within a cons now contains again the original content.
     Traverse the undeleted conses from left to right
     and compact them on the left:
     (in between, each cell contains a list of all addresses
     of pointers to this cell, that point to this cell
     from a cons lying further to the left.) */
  var aint p1limit = page->page_end; /* upper bound */
  var aint p1 = page->page_start; /* lower bound */
  var aint p2 = p1; /* lower bound */
  while (p1!=p1limit) { /* always: p2 <= p1 <= p1limit */
    /* both cells of a cons are treated exactly the same. */
    var object obj = *(gcv_object_t*)p1;
    if (!eq(obj,nullobj)) {
      /* p1 is moved to p2. */
      #ifdef TYPECODES
      /* The newly registered pointers to this cell are updated:
         follow the mark-bit chain and rewrite each referer to p2. */
      while ((as_oint(obj) & wbit(garcol_bit_o)) != 0) { /* process list */
        obj = without_mark_bit(obj);
        var aint p = upointer(obj);
        var object next_obj = *(gcv_object_t*)p;
        *(gcv_object_t*)p = type_pointer_object(typecode(obj),p2);
        obj = next_obj;
      }
      /* obj = true content of the cell p1. */
      {
        var tint type = typecode(obj);
        if (!gcinvariant_type_p(type)) /* un-movable -> do nothing */
          switch (type) {
            case_pair: { /* Two-Pointer-Object */
              var aint p = upointer(obj);
              if (p > p1) { /* pointer to the right? */
                /* For later update, insert
                   p2 into the list of pointers to p: */
                #ifdef DEBUG_SPVW
                if (eq(*(gcv_object_t*)p,nullobj)) abort();
                #endif
                *(gcv_object_t*)p2 = *(gcv_object_t*)p;
                *(gcv_object_t*)p = with_mark_bit(type_pointer_object(type,p2));
              } else if (p == p1) { /* pointer to itself? */
                *(gcv_object_t*)p2 = type_pointer_object(type,p2);
              } else {
                *(gcv_object_t*)p2 = obj;
              }
            }
              break;
            default: /* object of variable length */
              if (marked(ThePointer(obj))) /* marked? -> follow forwarding */
                *(gcv_object_t*)p2 = type_untype_object(type,untype(*(gcv_object_t*)ThePointer(obj)));
              else
                *(gcv_object_t*)p2 = obj;
              break;
          }
        else { /* un-movable or pointer into the old generation -> do nothing */
          *(gcv_object_t*)p2 = obj;
        }
      }
      #else
      /* The newly registered pointers to this cell are updated: */
      while ((as_oint(obj) & wbit(garcol_bit_o)) != 0) { /* process list */
        obj = without_mark_bit(obj);
        var aint p = (aint)ThePointer(obj);
        var object next_obj = *(gcv_object_t*)p;
        *(gcv_object_t*)p = as_object((as_oint(obj) & nonimmediate_bias_mask) | (oint)p2);
        obj = next_obj;
      }
      /* obj = true content of the cell p1. */
      if (!gcinvariant_object_p(obj)) { /* un-movable -> do nothing */
        if (consp(obj)) {
          /* Two-Pointer-Object */
          var aint p = (aint)ThePointer(obj);
          if (p > p1) { /* pointer to the right? */
            /* For later update, insert
               p2 into the list of pointers to p: */
            #ifdef DEBUG_SPVW
            if (eq(*(gcv_object_t*)p,nullobj)) abort();
            #endif
            *(gcv_object_t*)p2 = *(gcv_object_t*)p;
            *(gcv_object_t*)p = with_mark_bit(as_object((as_oint(obj) & nonimmediate_bias_mask) | (oint)p2));
          } else if (p == p1) { /* pointer to itself? */
            *(gcv_object_t*)p2 = as_object((as_oint(obj) & nonimmediate_bias_mask) | (oint)p2);
          } else {
            *(gcv_object_t*)p2 = obj;
          }
        } else {
          /* Object of variable length */
          if (marked(ThePointer(obj))) /* marked? -> follow forwarding */
            *(gcv_object_t*)p2 = as_object((as_oint(obj) & nonimmediate_bias_mask) | (as_oint(*(gcv_object_t*)ThePointer(obj)) & ~wbit(garcol_bit_o) & ~(oint)nonimmediate_bias_mask));
          else
            *(gcv_object_t*)p2 = obj;
        }
      } else { /* un-movable or pointer into the old generation -> do nothing */
        *(gcv_object_t*)p2 = obj;
      }
      #endif
      p2 += sizeof(gcv_object_t);
    }
    p1 += sizeof(gcv_object_t);
  }
  /* p2 = new upper bound of the cons region */
  if (p2 != page->page_end - page->page_gcpriv.d)
    abort();
  page->page_end = p2;
}
  704. #else /* SPVW_PURE_BLOCKS <==> SINGLEMAP_MEMORY */
  705. /* gc_morris2 and gc_morris3 must be called for each page exactly once,
  706. first gc_morris2 from right to left, then gc_morris3 from left to right
  707. (in terms of the positioning of the addresses)! */
local void gc_morris2 (Page* page)
{
  /* Each cell within a Cons now contains a list of all
     addresses of pointers to this cell, that point to this cell
     from a root or from a Varobject.
     Traverse the undeleted conses from right to left:
     (in between, each cell contains a list of all addresses
     of pointers to this cell, that point to this cell from a root,
     from a varobject or from a cons lying further to the right.) */
  var aint p1 = page->page_end; /* upper bound */
  var aint p2 = p1 - page->page_gcpriv.d; /* later upper bound */
  var aint p1limit = page->page_start; /* lower bound */
  while (p1!=p1limit) { /* always: p1limit <= p2 <= p1 */
    /* both cells of a cons are treated exactly the same. */
    p1 -= sizeof(gcv_object_t);
    var object obj = *(gcv_object_t*)p1;
    if (!eq(obj,nullobj)) {
      p2 -= sizeof(gcv_object_t);
      /* p1 is moved to p2.
         the pointers to this cell registered so far are updated:
         follow the mark-bit chain and rewrite each referer to p2. */
      while ((as_oint(obj) & wbit(garcol_bit_o)) != 0) { /* process list */
        obj = without_mark_bit(obj);
        var object next_obj = *(gcv_object_t*)pointable(obj);
        *(gcv_object_t*)pointable(obj) = as_object(p2);
        obj = next_obj;
      }
      /* obj = original content of the cell p1.
         if the cell contains a pointer "to the left", it is reversed. */
      if (is_cons_heap(typecode(obj))
          && !in_old_generation(obj,typecode(obj),1)
          && ((aint)pointable(obj) < p1)
         ) {
        /* For later update, insert
           p1 into the list of pointers to obj: */
        *(gcv_object_t*)p1 = *(gcv_object_t*)pointable(obj);
        *(gcv_object_t*)pointable(obj) = with_mark_bit(as_object(p1));
      } else {
        *(gcv_object_t*)p1 = obj;
      }
    }
  }
  if (p2!=p1limit)
    abort();
}
local void gc_morris3 (Page* page)
{
  /* Each cell within a cons now contains again the original content.
     Traverse the undeleted conses from left to right
     and compact them on the left:
     (in between, each cell contains a list of all addresses
     of pointers to this cell, that point to this cell
     from a cons lying further to the left.) */
  var aint p1limit = page->page_end; /* upper bound */
  var aint p1 = page->page_start; /* lower bound */
  var aint p2 = p1; /* lower bound */
  while (p1!=p1limit) { /* always: p2 <= p1 <= p1limit */
    /* both cells of a cons are treated exactly the same. */
    var object obj = *(gcv_object_t*)p1;
    if (!eq(obj,nullobj)) {
      /* p1 is moved to p2.
         The newly registered pointers to this cell are updated:
         follow the mark-bit chain and rewrite each referer to p2. */
      while ((as_oint(obj) & wbit(garcol_bit_o)) != 0) { /* process list */
        obj = without_mark_bit(obj);
        var object next_obj = *(gcv_object_t*)pointable(obj);
        *(gcv_object_t*)pointable(obj) = as_object(p2);
        obj = next_obj;
      }
      /* obj = true content of cell p1. */
      {
        var tint type = typecode(obj);
        if (!is_unused_heap(type) && !in_old_generation(obj,type,?))
          if (is_cons_heap(type)) {
            /* Two-Pointer-Object */
            if ((aint)pointable(obj) > p1) { /* pointer to the right? */
              /* For later update, insert
                 p2 into the list of pointers to obj: */
              *(gcv_object_t*)p2 = *(gcv_object_t*)pointable(obj);
              *(gcv_object_t*)pointable(obj) = with_mark_bit(as_object(p2));
            } else if ((aint)pointable(obj) == p1) { /* pointer to itself? */
              *(gcv_object_t*)p2 = as_object(p2);
            } else {
              *(gcv_object_t*)p2 = obj;
            }
          } else {
            /* object of variable length */
            if (marked(ThePointer(obj))) /* marked? -> follow forwarding */
              *(gcv_object_t*)p2 = type_untype_object(type,untype(*(gcv_object_t*)ThePointer(obj)));
            else
              *(gcv_object_t*)p2 = obj;
          }
        else { /* un-movable or pointer into the old generation -> do nothing */
          *(gcv_object_t*)p2 = obj;
        }
      }
      p2 += sizeof(gcv_object_t);
    }
    p1 += sizeof(gcv_object_t);
  }
  /* p2 = new upper bound of the cons region */
  if (p2 != page->page_end - page->page_gcpriv.d)
    abort();
  page->page_end = p2;
}
  810. #endif
  811. #endif
/* modify the self-pointer of an object of variable length:
   set_GCself(p,type,addr);
   sets p->GCself to type_pointer_object(type,addr). */
#ifdef TYPECODES
#if !(exact_uint_size_p(oint_type_len) && ((oint_type_shift%hfintsize)==0) && (tint_type_mask == bit(oint_type_len)-1))
/* The type field is not separately addressable: rebuild the whole
   header word arithmetically. */
#ifdef MAP_MEMORY
/* addr contains typeinfo */
#define make_GCself(type,addr)  \
  type_pointer_object((type)&(tint_type_mask),(addr)&(oint_addr_mask))
#else
/* addr contains no typeinfo */
#define make_GCself(type,addr)  \
  type_pointer_object((type)&(tint_type_mask),addr)
#endif
#define set_GCself(p,type,addr)  \
  ((Varobject)(p))->GCself = make_GCself(type,addr)
#else /* better: though two memory accesses, but less arithmetics */
/* Address word and type byte (header_flags) are written separately. */
#define make_GCself(type,addr)  \
  type_pointer_object((type)&(tint_type_mask),(addr)&~(oint_type_mask))
#define set_GCself(p,type,addr)  \
  (((Varobject)(p))->GCself = type_pointer_object(0,addr), \
   ((Varobject)(p))->header_flags = (type))
#endif
#else
/* No typecodes: the self-pointer carries no typeinfo at all. */
#define make_GCself(type,addr) /* ignore type */  \
  as_object((oint)(addr))
#define set_GCself(p,type,addr) /* ignore type */  \
  ((Varobject)(p))->GCself = make_GCself(type,addr)
#endif
  841. #ifdef HAVE_SMALL_SSTRING
  842. /* Special handling of forward pointers among simple-strings. */
/* Sweep-phase-1 handling of a reallocated simple-string at address p2:
   its 'data' slot is a forward pointer to the string's real storage.
   Ensures that, once the target's new address is known, it is written
   into the GCself of the target and of every forwarder chained to it. */
local void gc_sweep1_sstring_forward (aint p2) {
  var gcv_object_t forward = ((Sistring)p2)->data; /* the forward target */
  if (sstring_flags(TheSstring(forward)) & sstringflags_relocated_B) {
    /* Target already swept: its GCself holds the new address.
       Propagate it down the backchain that ends at p2. */
    var gcv_object_t target = TheSstring(forward)->GCself;
    var aint backchain = p2;
    for (;;) {
      var gcv_object_t backpointer = ((Varobject)backchain)->GCself;
      ((Varobject)backchain)->GCself = target;
      sstring_flags_set((Sstring)backchain,sstringflags_relocated_B);
      if (sstring_flags((Sstring)backchain) & sstringflags_backpointer_B)
        backchain = (aint)ThePointer(without_mark_bit(backpointer));
      else
        break;
    }
  } else {
    /* Leave a backpointer for later fixup.
       Each string can have only one forward pointer directly pointing
       to it. This ensures that the backchain is a singly linked list. */
    if (sstring_flags(TheSstring(forward)) & sstringflags_backpointer_B)
      /*NOTREACHED*/ abort();
    TheSstring(forward)->GCself = with_mark_bit(make_GCself(sstring_type,p2));
    sstring_flags_set(TheSstring(forward),sstringflags_backpointer_B);
  }
  /* Don't reclaim the space at p2 during this GC, because
     1. we need the mark bit at p2 so that update() does the
        relocation, and the mark bit tells gc_sweep2_varobject_page
        that the object is not yet reclaimed.
     2. otherwise last_open_ptr may be set to &((Varobject)p2)->GCself
        later. */
}
/* Sweep-phase-1 handling of a (non-reallocated) simple-string at p2 that
   may be the target of forward pointers: record the future address p1
   (with mark bit) in the GCself of p2 and of every forwarder that was
   chained behind it via backpointers. */
local void gc_sweep1_sstring_target (aint p2, aint p1) {
  if (sstring_flags((Sstring)p2) & sstringflags_relocated_B)
    /*NOTREACHED*/ abort();
  var gcv_object_t target; target = with_mark_bit(make_GCself(sstring_type,p1));
  var aint backchain = p2;
  for (;;) {
    var gcv_object_t backpointer = ((Varobject)backchain)->GCself;
    ((Varobject)backchain)->GCself = target;
    sstring_flags_set((Sstring)backchain,sstringflags_relocated_B);
    if (sstring_flags((Sstring)backchain) & sstringflags_backpointer_B)
      backchain = (aint)ThePointer(without_mark_bit(backpointer));
    else
      break;
  }
}
  888. #endif
  889. /* Special handling of forward pointers among CLOS instances. */
/* Sweep-phase-1 handling of a forwarded CLOS instance at address p2:
   its inst_class_version slot holds the forward target (the caller has
   already checked instflags_forwarded_B). Mirrors
   gc_sweep1_sstring_forward for instances. */
local void gc_sweep1_instance_forward (aint p2) {
  var gcv_object_t forward = ((Instance)p2)->inst_class_version; /* target */
  if (record_flags(TheInstance(forward)) & instflags_relocated_B) {
    /* Target already swept: propagate its new address down the
       backchain that ends at p2. */
    var gcv_object_t target = TheInstance(forward)->GCself;
    var aint backchain = p2;
    for (;;) {
      var gcv_object_t backpointer = ((Varobject)backchain)->GCself;
      ((Varobject)backchain)->GCself = target;
      record_flags_set((Record)backchain,instflags_relocated_B);
      if (record_flags((Record)backchain) & instflags_backpointer_B)
        backchain = (aint)ThePointer(without_mark_bit(backpointer));
      else
        break;
    }
  } else {
    /* Leave a backpointer for later fixup.
       Each instance can have only one forward pointer directly pointing
       to it. This ensures that the backchain is a singly linked list. */
    if (record_flags(TheInstance(forward)) & instflags_backpointer_B)
      /*NOTREACHED*/ abort();
    #ifdef TYPECODES
    /* The type is either instance_type or closure_type. */
    var tint type = mtypecode(((Varobject)p2)->GCself) & ~bit(garcol_bit_t);
    #endif
    /* NB: without TYPECODES, make_GCself ignores its 'type' argument. */
    TheInstance(forward)->GCself = with_mark_bit(make_GCself(type,p2));
    record_flags_set(TheInstance(forward),instflags_backpointer_B);
  }
  /* Don't reclaim the space at p2 during this GC, because
     1. we need the mark bit at p2 so that update() does the
        relocation, and the mark bit tells gc_sweep2_varobject_page
        that the object is not yet reclaimed.
     2. otherwise last_open_ptr may be set to &((Varobject)p2)->GCself
        later. */
}
/* Sweep-phase-1 handling of a (non-forwarded) instance at p2 that may be
   the target of forward pointers: record the future address p1 (with
   mark bit) in the GCself of p2 and of every forwarder chained behind
   it. Mirrors gc_sweep1_sstring_target for instances. */
local void gc_sweep1_instance_target (aint p2, aint p1) {
  if (record_flags((Instance)p2) & instflags_relocated_B)
    /*NOTREACHED*/ abort();
  #ifdef TYPECODES
  /* The type is either instance_type or closure_type. */
  var tint type = mtypecode(((Varobject)p2)->GCself) & ~bit(garcol_bit_t);
  #endif
  /* NB: without TYPECODES, make_GCself ignores its 'type' argument. */
  var gcv_object_t target; target = with_mark_bit(make_GCself(type,p1));
  var aint backchain = p2;
  for (;;) {
    var gcv_object_t backpointer = ((Varobject)backchain)->GCself;
    ((Varobject)backchain)->GCself = target;
    record_flags_set((Record)backchain,instflags_relocated_B);
    if (record_flags((Record)backchain) & instflags_backpointer_B)
      backchain = (aint)ThePointer(without_mark_bit(backpointer));
    else
      break;
  }
}
/* Prepare objects of variable length between page->page_start and
   page->page_end for compacting below. Therefore, in each marked
   object the pointer in front is pointed to the location, where the
   object will be located later (including typeinfo). If the sequencing
   object is unmarked, then its first pointer is oriented to the address
   of the next marked object.
   In the SPVW_PURE / GENERATIONAL_GC configurations the destination
   region is passed in explicitly and the function returns the advanced
   destination pointer; otherwise the page compacts onto itself. */
#ifdef SPVW_PURE
local aint gc_sweep1_varobject_page (uintL heapnr, aint start, aint end, gcv_object_t* firstmarked, aint dest)
#elif defined(GENERATIONAL_GC)
local aint gc_sweep1_varobject_page (aint start, aint end, gcv_object_t* firstmarked, aint dest)
#else
local void gc_sweep1_varobject_page (Page* page)
#endif
{
  #if defined(SPVW_PURE) || defined(GENERATIONAL_GC)
  var gcv_object_t* last_open_ptr = firstmarked;
  var aint p2 = start; /* source-pointer */
  var aint p2end = end; /* upper bound of the source-region */
  var aint p1 = dest; /* destination-pointer */
  #else
  var gcv_object_t* last_open_ptr = &page->page_gcpriv.firstmarked;
  /* In *last_open_ptr, always store the address of the next marked
     object (as oint).
     Via chained-list-mechanism: At the end, page->page_gcpriv.firstmarked
     contains the address of the 1st marked object */
  var aint p2 = page->page_start; /* source-pointer */
  var aint p2end = page->page_end; /* upper bound of the source-region */
  var aint p1 = p2; /* destination-pointer */
  #endif
  /* start <= p1 <= p2 <= end, p1 and p2 grow, p2 faster than p1. */
  var_prepare_objsize;
 sweeploop1:
  /* search next marked object.
     enter address of the next marked object in *last_open_ptr . */
  if (p2==p2end) /* upper bound reached -> finished */
    goto sweepok1;
  {
    #ifdef TYPECODES
    var tint flags = mtypecode(((Varobject)p2)->GCself);
    /* save typeinfo (and flags for symbols) */
    #endif
    var uintM laenge = objsize((Varobject)p2); /* determine byte-length */
    if (!marked(p2)) { /* object unmarked? */
      p2 += laenge; goto sweeploop1; /* yes -> goto next object */
    }
    /* object marked
       Elimination of forward pointers: */
    #ifdef HAVE_SMALL_SSTRING
    #ifdef SPVW_PURE
    if (heapnr == sstring_type)
    #else
    #ifdef TYPECODES
    if ((flags & ~bit(garcol_bit_t)) == sstring_type)
    #else
    /* NB: No need to handle Rectype_[Imm_]S8string here. */
    if ((uintB)(record_type((Record)p2) - Rectype_S16string)
        <= Rectype_reallocstring - Rectype_S16string)
    #endif
    #endif
      {
        if (sstring_reallocatedp((Sstring)p2)) {
          /* A forward pointer. */
          gc_sweep1_sstring_forward(p2);
        } else {
          /* Possibly the target of a forward pointer. */
          gc_sweep1_sstring_target(p2,p1);
        }
      }
    else
    #endif
    #ifdef SPVW_PURE
    if (heapnr == instance_type
        || (heapnr == closure_type
            && (closure_flags((Closure)p2) & closflags_instance_B)))
    #else
    #ifdef TYPECODES
    if ((flags & ~bit(garcol_bit_t)) == instance_type
        || ((flags & ~bit(garcol_bit_t)) == closure_type
            && (closure_flags((Closure)p2) & closflags_instance_B)))
    #else
    if (record_type((Record)p2) == Rectype_Instance
        || (record_type((Record)p2) == Rectype_Closure
            && (closure_flags((Closure)p2) & closflags_instance_B)))
    #endif
    #endif
      {
        if (record_flags((Instance)p2) & instflags_forwarded_B) {
          /* A forward pointer. */
          gc_sweep1_instance_forward(p2);
        } else {
          /* Possibly the target of a forward pointer. */
          gc_sweep1_instance_target(p2,p1);
        }
      }
    else {
      set_GCself(p2, flags,p1); /* enter new address, with old */
      /* typeinfo (the mark bit is contained within) */
      #ifndef TYPECODES
      mark(p2);
      #endif
    }
    *last_open_ptr = pointer_as_object(p2); /* store address */
    p2 += laenge; /* source address for next object */
    p1 += laenge; /* destination address for next object */
  }
 sweeploop2:
  /* search next unmarked object. */
  if (p2==p2end) /* upper bound reached -> finished */
    goto sweepok2;
  {
    #ifdef TYPECODES
    var tint flags = mtypecode(((Varobject)p2)->GCself);
    /* save typeinfo (and flags for symbols) */
    #endif
    var uintM laenge = objsize((Varobject)p2); /* determine byte-length */
    if (!marked(p2)) { /* object unmarked? */
      /* yes -> its first word becomes the next link slot of the chain: */
      last_open_ptr = (gcv_object_t*)p2;
      p2 += laenge; goto sweeploop1; /* goto next object */
    }
    /* object marked
       Elimination of forward pointers: */
    #ifdef HAVE_SMALL_SSTRING
    #ifdef SPVW_PURE
    if (heapnr == sstring_type)
    #else
    #ifdef TYPECODES
    if ((flags & ~bit(garcol_bit_t)) == sstring_type)
    #else
    /* NB: No need to handle Rectype_[Imm_]S8string here. */
    if ((uintB)(record_type((Record)p2) - Rectype_S16string)
        <= Rectype_reallocstring - Rectype_S16string)
    #endif
    #endif
      {
        if (sstring_reallocatedp((Sstring)p2)) {
          /* A forward pointer. */
          gc_sweep1_sstring_forward(p2);
        } else {
          /* Possibly the target of a forward pointer. */
          gc_sweep1_sstring_target(p2,p1);
        }
      }
    else
    #endif
    #ifdef SPVW_PURE
    if (heapnr == instance_type
        || (heapnr == closure_type
            && (closure_flags((Closure)p2) & closflags_instance_B)))
    #else
    #ifdef TYPECODES
    if ((flags & ~bit(garcol_bit_t)) == instance_type
        || ((flags & ~bit(garcol_bit_t)) == closure_type
            && (closure_flags((Closure)p2) & closflags_instance_B)))
    #else
    if (record_type((Record)p2) == Rectype_Instance
        || (record_type((Record)p2) == Rectype_Closure
            && (closure_flags((Closure)p2) & closflags_instance_B)))
    #endif
    #endif
      {
        if (record_flags((Instance)p2) & instflags_forwarded_B) {
          /* A forward pointer. */
          gc_sweep1_instance_forward(p2);
        } else {
          /* Possibly the target of a forward pointer. */
          gc_sweep1_instance_target(p2,p1);
        }
      }
    else {
      set_GCself(p2, flags,p1); /* enter new address, with old */
      /* typeinfo (the mark bit is contained within) */
      #ifndef TYPECODES
      mark(p2);
      #endif
    }
    p2 += laenge; /* source address for next object */
    p1 += laenge; /* destination address for next object */
    goto sweeploop2;
  }
 sweepok1: { *last_open_ptr = pointer_as_object(p2); }
 sweepok2: ;
  #if defined(SPVW_PURE) || defined(GENERATIONAL_GC)
  return p1; /* new end of the destination region */
  #endif
}
/* update phase:
   The entire LISP-memory is perused and old addresses are replaced
   with new ones.
   update of an object *objptr : */
#if !defined(MORRIS_GC)
#ifdef TYPECODES
/* Non-Morris GC with typecodes: a marked object's header holds its new
   address; recombine it with the saved typeinfo. */
#define update(objptr)  \
  { var tint type = mtypecode(*(gcv_object_t*)objptr);              \
    if (!gcinvariant_type_p(type)) { /* un-movable -> do nothing */ \
      var object obj = *(gcv_object_t*)objptr; /* object */         \
      if (!in_old_generation(obj,type,mem.heapnr_from_type[type]))  \
        /* older generation -> do nothing (object stayed there) */  \
        if (marked(ThePointer(obj))) { /* marked? */                \
          /* no -> do nothing (object stayed there)                 \
             yes -> enter new address and typeinfobyte (incl.       \
             poss. symbol-binding-flag) */                          \
          var object newptr =                                       \
            type_untype_object(type,untype(*(gcv_object_t*)ThePointer(obj))); \
          DEBUG_SPVW_ASSERT(is_valid_heap_object_address(as_oint(newptr)) \
                            || is_valid_stack_address(as_oint(newptr)));  \
          *(gcv_object_t*)objptr = newptr;                          \
        }                                                           \
    }                                                               \
  }
#else
#ifdef GENERATIONAL_GC
/* Non-Morris GC, heapcodes, generational: the old-generation test
   distinguishes conses (1) from varobjects (0). */
#define update(objptr)  \
  { var object obj = *(gcv_object_t*)objptr; /* object */       \
    if (!gcinvariant_object_p(obj)) /* un-movable -> do nothing */ \
      if (!(consp(obj) ? in_old_generation(obj,,1) : in_old_generation(obj,,0))) \
        /* older generation -> do nothing (object stayed there) */ \
        if (marked(ThePointer(obj))) { /* marked? */            \
          /* no -> do nothing (object stayed there)             \
             yes -> enter new address */                        \
          var object newptr =                                   \
            as_object((as_oint(obj) & nonimmediate_bias_mask) | (as_oint(*(gcv_object_t*)ThePointer(obj)) & ~wbit(garcol_bit_o))); \
          DEBUG_SPVW_ASSERT((consp(obj) ? is_valid_cons_address(as_oint(newptr)) : is_valid_varobject_address(as_oint(newptr))) \
                            || is_valid_stack_address(as_oint(newptr))); \
          *(gcv_object_t*)objptr = newptr;                      \
        }                                                       \
  }
#else
/* Non-Morris GC, heapcodes, non-generational. */
#define update(objptr)  \
  { var object obj = *(gcv_object_t*)objptr; /* object */       \
    if (!gcinvariant_object_p(obj)) /* un-movable -> do nothing */ \
      if (!in_old_generation(obj,,))                            \
        /* older generation -> do nothing (object stayed there) */ \
        if (marked(ThePointer(obj))) { /* marked? */            \
          /* no -> do nothing (object stayed there)             \
             yes -> enter new address */                        \
          var object newptr =                                   \
            as_object((as_oint(obj) & nonimmediate_bias_mask) | (as_oint(*(gcv_object_t*)ThePointer(obj)) & ~wbit(garcol_bit_o))); \
          DEBUG_SPVW_ASSERT((consp(obj) ? is_valid_cons_address(as_oint(newptr)) : is_valid_varobject_address(as_oint(newptr))) \
                            || is_valid_stack_address(as_oint(newptr))); \
          *(gcv_object_t*)objptr = newptr;                      \
        }                                                       \
  }
#endif
#endif
#else /* defined(MORRIS_GC) */
#if defined(SPVW_MIXED_BLOCKS)
#ifdef TYPECODES
/* Morris GC: varobjects are relocated directly; pointers to conses are
   threaded into the per-cell lists processed by gc_morris2/gc_morris3. */
#define update(objptr)  \
  { var tint type = mtypecode(*(gcv_object_t*)objptr);          \
    if (!gcinvariant_type_p(type)) /* un-movable -> do nothing */ \
      switch (type) {                                           \
        default: { /* object of variable length */              \
          var object obj = *(gcv_object_t*)objptr; /* object */ \
          if (!in_old_generation(obj,type,0))                   \
            if (marked(ThePointer(obj))) { /* marked? */        \
              var object newptr =                               \
                type_untype_object(type,untype(*(gcv_object_t*)ThePointer(obj))); \
              /*DEBUG_SPVW_ASSERT(is_valid_varobject_address(as_oint(newptr))\
                || is_valid_stack_address(as_oint(newptr)));*/  \
              *(gcv_object_t*)objptr = newptr;                  \
            }                                                   \
        }                                                       \
          break;                                                \
        case_pair: { /* Two-Pointer-Object */                   \
          var object obj = *(gcv_object_t*)objptr; /* object */ \
          if (!in_old_generation(obj,type,1)) {                 \
            /* for later update, insert into its list: */       \
            *(gcv_object_t*)objptr = *(gcv_object_t*)ThePointer(obj); \
            *(gcv_object_t*)ThePointer(obj) = with_mark_bit(type_pointer_object(type,objptr)); \
          }                                                     \
        }                                                       \
          break;                                                \
      }                                                         \
  }
#else
/* Morris GC, heapcodes. */
#define update(objptr)  \
  { var object obj = *(gcv_object_t*)objptr; /* object */       \
    if (!gcinvariant_object_p(obj)) {                           \
      if (consp(obj)) { /* Two-Pointer-Object */                \
        if (!in_old_generation(obj,,1)) {                       \
          /* for later update, insert into its list: */         \
          *(gcv_object_t*)objptr = *(gcv_object_t*)ThePointer(obj); \
          *(gcv_object_t*)ThePointer(obj) = with_mark_bit(as_object((as_oint(obj) & nonimmediate_bias_mask) | (oint)objptr)); \
        }                                                       \
      } else { /* object of variable length */                  \
        if (!in_old_generation(obj,,0)) {                       \
          if (marked(ThePointer(obj))) { /* marked? */          \
            var object newptr =                                 \
              as_object((as_oint(obj) & nonimmediate_bias_mask) | (as_oint(*(gcv_object_t*)ThePointer(obj)) & ~wbit(garcol_bit_o) & ~(oint)nonimmediate_bias_mask)); \
            DEBUG_SPVW_ASSERT(is_valid_varobject_address(as_oint(newptr)) \
                              || is_valid_stack_address(as_oint(newptr))); \
            *(gcv_object_t*)objptr = newptr;                    \
          }                                                     \
        }                                                       \
      }                                                         \
    }                                                           \
  }
#endif
#else /* defined(SPVW_PURE_BLOCKS) - && defined(SINGLEMAP_MEMORY) */
/* NB: the generation argument '?' below is a placeholder, evidently not
   expanded by in_old_generation in this configuration. */
#define update(objptr)  \
  { var tint type = mtypecode(*(gcv_object_t*)objptr);          \
    if (!is_unused_heap(type)) { /* unmovable -> do nothing */  \
      var object obj = *(gcv_object_t*)objptr; /* object */     \
      if (!in_old_generation(obj,type,?)) {                     \
        /* older generation -> do nothing (object stayed there) */ \
        if (is_varobject_heap(type)) { /* object of variable length */ \
          if (marked(ThePointer(obj))) { /* marked? */          \
            var object newptr =                                 \
              type_untype_object(type,untype(*(gcv_object_t*)ThePointer(obj))); \
            DEBUG_SPVW_ASSERT(is_valid_varobject_address(as_oint(newptr)) \
                              || is_valid_stack_address(as_oint(newptr))); \
            *(gcv_object_t*)objptr = newptr;                    \
          }                                                     \
        } else { /* Two-Pointer-Object */                       \
          /* for later update, insert into its list: */         \
          *(gcv_object_t*)objptr = *(gcv_object_t*)ThePointer(obj); \
          *(gcv_object_t*)ThePointer(obj) = with_mark_bit(pointer_as_object(objptr)); \
        }                                                       \
      }                                                         \
    }                                                           \
  }
#endif
#endif
#ifndef NO_symbolflags
/* update of a STACK slot: symbol-macro/active flags live in the pointer
   word and must be stripped before and restored after the update. */
#define update_stackobj(objptr)  \
  switch (mtypecode(*objptr)) {                             \
    case_symbolflagged: { /* symbol, poss. with flags */    \
      var object obj1 = *objptr;                            \
      var object obj2 = symbol_without_flags(obj1);         \
      var oint flags = as_oint(obj1) ^ as_oint(obj2);       \
      *objptr = obj2; /* delete flags */                    \
      update(objptr); /* then update */                     \
      *objptr = as_object(as_oint(*objptr) | flags); /* then back again */ \
      break;                                                \
    }                                                       \
    default: update(objptr); break;                         \
  }
#else
#define update_stackobj(objptr) update(objptr);
#endif
  1283. /* update of old generation: */
  1284. #include "spvw_genera3.c"
/* second SWEEP-phase:
   relocation of an object of variable length, advance p1 and p2:
   move_aligned_p1_p2(count);
   uintVLA is an unsigned integer type of exactly varobject_alignment
   bytes, so the copy loop moves one alignment unit per iteration. */
#if (varobject_alignment==1)
#define uintVLA uintB
#elif (varobject_alignment==2)
#define uintVLA uintW
#elif (varobject_alignment==4)
#define uintVLA uintL
#elif (varobject_alignment==8)
#define uintVLA uintL2
#else
#error "Unknown value for 'varobject_alignment'!"
#endif
#if defined(GNU) && (__GNUC__ < 3) && !defined(__cplusplus) /* better for optimization */
/* old GCC accepts cast-lvalue increment ((type*)p)++ */
#if defined(fast_dotimesL) && (intMsize==intLsize)
#define move_aligned_p1_p2(count)  \
  dotimespL(count,count/varobject_alignment, *((uintVLA*)p2)++ = *((uintVLA*)p1)++; )
#else
#define move_aligned_p1_p2(count)  \
  do { *((uintVLA*)p2)++ = *((uintVLA*)p1)++; count -= varobject_alignment; } while (count!=0)
#endif
#else /* other compilers do not accept ((type*)p)++ . */
/* how efficient is this here?? */
#define move_aligned_p1_p2(count)  \
  do {                                                    \
    *(uintVLA*)p2 = *(uintVLA*)p1;                        \
    p1 += varobject_alignment; p2 += varobject_alignment; \
    count -= varobject_alignment;                         \
  } while (count!=0)
#endif
/* the objects of variable length are moved into the preordained
   new places (computed by gc_sweep1_varobject_page). */
#ifdef SPVW_PURE
local void gc_sweep2_varobject_page (Page* page, uintL heapnr)
#else
local void gc_sweep2_varobject_page (Page* page)
#endif
{
  /* peruse from below and shift down: */
  var aint p1 = (aint)pointer_was_object(page->page_gcpriv.firstmarked); /* source-pointer, first marked object */
  var aint p1end = page->page_end;
  var aint p2 = page->page_start; /* destination-pointer */
  var_prepare_objsize;
  while (p1!=p1end) { /* upper bound reached -> finished */
    /* next object has address p1 */
    if (marked(p1)) { /* marked? */
      unmark(p1); /* delete mark */
      /* keep object and relocate: */
      var uintM count = objsize((Varobject)p1); /* length (divisible by varobject_alignment , >0) */
      if (p1!=p2) { /* if relocation is necessary */
        move_aligned_p1_p2(count); /* relocate and advance */
      } else { /* else only advance: */
        p1 += count; p2 += count;
      }
    } else {
      /* skip the gap: sweep phase 1 stored a link to the next marked
         object in the first word of each unmarked object. */
      p1 = (aint)pointer_was_object(*(gcv_object_t*)p1); /* with pointer (typeinfo=0) to the next marked object */
    }
  }
  page->page_end = p2; /* set upper bound of the objects of variable length */
}
#if defined(DEBUG_SPVW) && !defined(GENERATIONAL_GC) && !defined(TYPECODES)
/* check, if everything is really unmarked:
   debug-only consistency check run after GC; aborts (with a report on
   stderr) at the first object that still carries a mark bit. */
#define CHECK_GC_UNMARKED() gc_unmarkcheck()
local void gc_unmarkcheck (void) {
  for_each_varobject_page(page, { /* peruse from below: */
    var aint p1 = page->page_start;
    var aint p1end = page->page_end;
    var_prepare_objsize;
    while (p1!=p1end) { /* upper bound reached -> finished */
      /* next object has address p1 */
      if (marked(p1)) { /* marked? */
        fprintf(stderr,"\n[%s:%d] Object 0x%lx in [0x%lx 0x%lx] marked!!\n",
                __FILE__,__LINE__,p1,page->page_start,page->page_end);
        abort();
      }
      p1 += objsize((Varobject)p1);
    }
  });
  for_each_cons_page(page, { /* peruse from below: */
    var aint p1 = page->page_start;
    var aint p1end = page->page_end;
    while (p1!=p1end) { /* upper bound reached -> finished */
      /* next object has address p1 */
      if (marked(p1)) { /* marked? */
        fprintf(stderr,"\n[%s:%d] Object 0x%lx in [0x%lx 0x%lx] marked!!\n",
                __FILE__,__LINE__,p1,page->page_start,page->page_end);
        abort();
      }
      p1 += sizeof(cons_);
    }
  });
}
#else
#define CHECK_GC_UNMARKED()
#endif
  1381. #ifdef DEBUG_SPVW
  1382. /* check against nullpointer: */
  1383. #define CHECK_NULLOBJ() nullobjcheck(false)
  1384. local void nullobjcheck (bool in_gc);
  1385. local void nullobjcheck_range (aint p1, aint p1end, bool in_gc)
  1386. {
  1387. while (p1!=p1end) { /* upper bound reached -> finished */
  1388. /* next object has address p1 */
  1389. if (eq(((Cons)p1)->cdr,nullobj) || eq(((Cons)p1)->car,nullobj))
  1390. if (!(in_gc && eq(((Cons)p1)->cdr,nullobj) && eq(((Cons)p1)->car,nullobj)))
  1391. abort();
  1392. p1 += sizeof(cons_);
  1393. }
  1394. }
/* Run nullobjcheck_range over all cons regions of all heaps/pages,
   covering both generations where GENERATIONAL_GC splits them. */
local void nullobjcheck (bool in_gc)
{
  /* peruse from below: */
  #ifdef GENERATIONAL_GC
  #ifdef SPVW_MIXED_BLOCKS_OPPOSITE
  /* conses grow downward: new generation lies below the old one */
  for_each_cons_heap(heap, {
    nullobjcheck_range(heap->heap_start,heap->heap_gen1_end,in_gc);
    nullobjcheck_range(heap->heap_gen0_start,heap->heap_gen0_end,in_gc);
  });
  #else
  for_each_cons_heap(heap, {
    nullobjcheck_range(heap->heap_gen0_start,heap->heap_gen0_end,in_gc);
    nullobjcheck_range(heap->heap_gen1_start,heap->heap_end,in_gc);
  });
  #endif
  #else
  for_each_cons_page(page, {
    nullobjcheck_range(page->page_start,page->page_end,in_gc);
  });
  #endif
}
  1416. #else
  1417. #define CHECK_NULLOBJ()
  1418. #endif
  1419. #ifdef SPVW_PAGES
/* free superfluous pages:
   if the space that is available in mem.free_pages after GC,
   amounts to more than 25% of what we currently need, the rest
   is returned back to the operating system. */
local void free_some_unused_pages (void)
{
  /* NB: floor(x,4) is the integer-division macro, not <math.h> floor. */
  var uintM needed_space = floor(mem.last_gcend_space,4); /* 25% */
  var uintM accu_space = 0; /* free space accumulated so far */
  var Pages* pageptr = &mem.free_pages; /* link slot pointing at 'page' */
  var Pages page = *pageptr;
  while (page!=NULL) {
    var Pages nextpage = (Pages) page->page_gcpriv.next;
    if (accu_space < needed_space) {
      /* retain page: advance the link slot past it */
      accu_space += page->page_room;
      pageptr = (Pages*)&page->page_gcpriv.next; page = nextpage;
    } else {
      /* free page: unlink it by rewriting the current link slot */
      free_page(page); page = *pageptr = nextpage;
    }
  }
}
  1442. #endif
  1443. /* perform normal Garbage Collection: */
  1444. local void gar_col_normal (void)
  1445. {
  1446. var uintM gcstart_space; /* occupied memory at GC-start */
  1447. var uintM gcend_space; /* occupied memory at GC-end */
  1448. var object all_weakpointers; /* list of active Weak-pointers */
  1449. var object all_finalizers; /* list of finalizers */
  1450. #ifdef GC_CLOSES_FILES
  1451. var object files_to_close; /* list of files to be closed */
  1452. #endif
  1453. set_break_sem_1(); /* disable BREAK during Garbage Collection */
  1454. gc_signalblock_on(); /* disable Signals during Garbage Collection */
  1455. gc_timer_on();
  1456. gcstart_space = used_space(); /* detect occupied memory */
  1457. #ifdef HAVE_VADVISE
  1458. begin_system_call();
  1459. vadvise(VA_ANOM); /* Paging-behaviour now becomes a little unusual */
  1460. end_system_call();
  1461. #endif
  1462. CHECK_GC_UNMARKED(); CHECK_NULLOBJ(); CHECK_GC_CACHE(); CHECK_GC_GENERATIONAL(); SAVE_GC_DATA();
  1463. #ifdef SPVW_PAGES
  1464. {
  1465. var uintL heapnr;
  1466. for (heapnr=0; heapnr<heapcount; heapnr++) {
  1467. AVL_map(mem.heaps[heapnr].inuse,page,
  1468. page->page_room += page->page_end;);
  1469. /* the end of usable space is stored in page_room. */
  1470. }
  1471. }
  1472. #endif
  1473. #ifdef GENERATIONAL_GC
  1474. if (generation == 0) {
  1475. /* update old generation with help of the cache: */
  1476. prepare_old_generation();
  1477. } else {
  1478. /* only treat the new generation. Hide old generation: */
  1479. #ifdef SPVW_MIXED_BLOCKS_OPPOSITE
  1480. mem.varobjects.heap_start = mem.varobjects.heap_gen1_start;
  1481. mem.conses.heap_end = mem.conses.heap_gen1_end;
  1482. #else
  1483. var uintL heapnr;
  1484. for (heapnr=0; heapnr<heapcount; heapnr++)
  1485. mem.heaps[heapnr].heap_start = mem.heaps[heapnr].heap_gen1_start;
  1486. #endif
  1487. }
  1488. #endif
  1489. CHECK_GC_GENERATIONAL();
  1490. /* mark phase: */
  1491. all_weakpointers = O(all_weakpointers); O(all_weakpointers) = Fixnum_0;
  1492. all_finalizers = O(all_finalizers); O(all_finalizers) = Fixnum_0;
  1493. #ifdef GC_CLOSES_FILES
  1494. files_to_close = O(open_files); O(open_files) = NIL; /* O(files_to_close) = NIL; */
  1495. #endif
  1496. gc_markphase();
  1497. gc_mark_weakpointers(all_weakpointers);
  1498. /* Now only, after gc_mark_weakpointers, can alive() be called.
  1499. FIXME: This use of alive() and gc_mark() doesn't integrate well with
  1500. the weak-pointer handling.
  1501. Split (still unmarked) list all_finalizers into two lists: */
  1502. {
  1503. var object Lu = all_finalizers;
  1504. var gcv_object_t* L1 = &O(all_finalizers);
  1505. var gcv_object_t* L2 = &O(pending_finalizers);
  1506. while (!(eq(*L2,Fixnum_0))) {
  1507. L2 = &TheFinalizer(*L2)->fin_cdr;
  1508. }
  1509. while (!(eq(Lu,Fixnum_0))) {
  1510. /* if fin_alive is dead, the finalizer is thrown away,
  1511. without being executed: */
  1512. if (!alive(TheFinalizer(Lu)->fin_alive)) {
  1513. Lu = TheFinalizer(Lu)->fin_cdr;
  1514. } else {
  1515. /* if fin_trigger dies, the finalizer is executed: */
  1516. if (alive(TheFinalizer(Lu)->fin_trigger)) { /* is fin_trigger still alive? */
  1517. /* yes -> take over in O(all_finalizers) : */
  1518. *L1 = Lu; L1 = &TheFinalizer(Lu)->fin_cdr; Lu = *L1;
  1519. } else {
  1520. /* no -> take over in O(pending_finalizers) : */
  1521. *L2 = Lu; L2 = &TheFinalizer(Lu)->fin_cdr; Lu = *L2;
  1522. }
  1523. }
  1524. }
  1525. *L1 = Fixnum_0; *L2 = Fixnum_0;
  1526. }
  1527. gc_mark(O(all_finalizers)); gc_mark(O(pending_finalizers)); /* mark both lists now */
  1528. #ifdef GC_CLOSES_FILES
  1529. /* FIXME: This use of marked() and gc_mark() doesn't integrate well with
  1530. the weak-pointer handling.
  1531. Split (still unmarked) list files_to_close into two lists: */
  1532. {
  1533. var object Lu = files_to_close;
  1534. var gcv_object_t* L1 = &O(open_files);
  1535. var gcv_object_t* L2 = &O(files_to_close);
  1536. while (consp(Lu)) {
  1537. if (in_old_generation(Car(Lu),stream_type,0)
  1538. || marked(TheStream(Car(Lu)))) { /* (car Lu) marked? */
  1539. /* yes -> take over in O(open_files) : */
  1540. *L1 = Lu; L1 = &Cdr(Lu); Lu = *L1;
  1541. } else {
  1542. /* no -> take over in O(files_to_close) : */
  1543. *L2 = Lu; L2 = &Cdr(Lu); Lu = *L2;
  1544. }
  1545. }
  1546. *L1 = NIL; *L2 = NIL;
  1547. }
  1548. gc_mark(O(open_files)); gc_mark(O(files_to_close)); /* mark both lists now */
  1549. #endif
  1550. /* No more gc_mark operations from here on. */
  1551. clean_weakpointers(all_weakpointers);
  1552. #if defined(USE_JITC)
  1553. gc_scan_jitc_objects();
  1554. #endif
  1555. inside_gc = true;
  1556. /* All active objects are marked now:
  1557. active objects of variable length and active two-pointer-objects carry
  1558. in their first byte a set mark bit, active SUBRs carry
  1559. in their first constant pointer a set mark bit, all other
  1560. mark bits are deleted.
  1561. "Sweep"-Phase:
  1562. the CONSes and similar (objects with 2 pointers) are compacted.
  1563. the destinations of the objects of variable length for phase 4
  1564. are calculated and stored.
  1565. SUBRs and fixed symbols (they are all active) are unmarked first: */
  1566. unmark_fixed_varobjects();
  1567. #ifndef MORRIS_GC
  1568. /* compact CONS-cells: */
  1569. for_each_cons_page(page, { gc_compact_cons_page(page); } );
  1570. #endif
  1571. /* prepare objects of variable length for compacting below: */
  1572. #ifdef SPVW_PURE
  1573. #ifdef GENERATIONAL_GC
  1574. if (generation == 0) {
  1575. for_each_varobject_heap(heap, {
  1576. if (heap->heap_gen0_end < heap->heap_gen1_start) {
  1577. /* Bridge the gap by putting a pointer. */
  1578. var aint tmp =
  1579. gc_sweep1_varobject_page(heapnr,
  1580. heap->heap_gen0_start,heap->heap_gen0_end,
  1581. &heap->pages.page_gcpriv.firstmarked,
  1582. heap->heap_gen0_start);
  1583. gc_sweep1_varobject_page(heapnr,
  1584. heap->heap_gen1_start,heap->heap_end,
  1585. (gcv_object_t*)heap->heap_gen0_end,
  1586. tmp);
  1587. } else { /* no gap */
  1588. gc_sweep1_varobject_page(heapnr,
  1589. heap->heap_gen0_start,heap->heap_end,
  1590. &heap->pages.page_gcpriv.firstmarked,
  1591. heap->heap_gen0_start);
  1592. }
  1593. });
  1594. } else
  1595. #endif
  1596. for_each_varobject_page(page, {
  1597. gc_sweep1_varobject_page(heapnr,
  1598. page->page_start,page->page_end,
  1599. &page->page_gcpriv.firstmarked,
  1600. page->page_start);
  1601. });
  1602. #else /* SPVW_MIXED */
  1603. #ifdef GENERATIONAL_GC
  1604. if (generation == 0) {
  1605. for_each_varobject_heap(heap, {
  1606. if (heap->heap_gen0_end < heap->heap_gen1_start) {
  1607. /* Bridge the gap by putting a pointer. */
  1608. var aint tmp =
  1609. gc_sweep1_varobject_page(heap->heap_gen0_start,heap->heap_gen0_end,
  1610. &heap->pages.page_gcpriv.firstmarked,
  1611. heap->heap_gen0_start);
  1612. gc_sweep1_varobject_page(heap->heap_gen1_start,heap->heap_end,
  1613. (gcv_object_t*)(heap->heap_gen0_end),
  1614. tmp);
  1615. } else { /* no gap */
  1616. gc_sweep1_varobject_page(heap->heap_gen0_start,heap->heap_end,
  1617. &heap->pages.page_gcpriv.firstmarked,
  1618. heap->heap_gen0_start);
  1619. }
  1620. });
  1621. } else
  1622. for_each_varobject_page(page, {
  1623. gc_sweep1_varobject_page(page->page_start,page->page_end,
  1624. &page->page_gcpriv.firstmarked,
  1625. page->page_start);
  1626. });
  1627. #else
  1628. for_each_varobject_page(page, { gc_sweep1_varobject_page(page); } );
  1629. #endif
  1630. #endif
  1631. /* Now all active objects are prepared for update:
  1632. For active objects of variable length at objptr, *objptr is the address,
  1633. where the object will be situated after the GC (incl. Typeinfo and
  1634. mark bit and poss. symbol-flags).
  1635. For active two-pointer-objects at objptr, objptr either stays where it is
  1636. (then the mark bit in *objptr is cleared), or objptr is relocated
  1637. (then *objptr is the new address, without typeinfo, but including mark
  1638. bit).
  1639. update phase:
  1640. The entire LISP-memory is perused and old addresses
  1641. are replaced with new ones. */
  1642. #ifdef MORRIS_GC
  1643. for_each_cons_page(page, { gc_morris1(page); } );
  1644. #endif
  1645. /* peruse all LISP-objects and update:
  1646. Update pointers in all LISP-stacks: */
  1647. update_STACKs();
  1648. /* Update pointers in all C stacks: */
  1649. update_back_traces();
  1650. /* Update program constants: */
  1651. update_tables();
  1652. #ifndef MORRIS_GC
  1653. /* update pointers in the Cons-cells: */
  1654. #define update_conspage update_conspage_normal
  1655. update_conses();
  1656. #undef update_conspage
  1657. #endif
  1658. /* update the pointers in the objects of variable length: */
  1659. #define update_page(page,updater) \
  1660. { var aint ptr = (aint)pointer_was_object(page->page_gcpriv.firstmarked); \
  1661. var aint ptrend = page->page_end; \
  1662. /* peruse all objects with address >=ptr, <ptrend : */ \
  1663. while (ptr!=ptrend) { /* until ptr has reached the end */ \
  1664. /* peruse next object with address ptr (< ptrend) : */ \
  1665. if (marked(ptr)) { /* marked? */ \
  1666. /* take typeinfo without mark bit! */ \
  1667. updater(typecode_at(ptr) & ~bit(garcol_bit_t)); \
  1668. } else { \
  1669. /* go with pointer (typeinfo=0) to the next marked object */ \
  1670. ptr = (aint)pointer_was_object(*(gcv_object_t*)ptr); \
  1671. } \
  1672. } \
  1673. }
  1674. #ifdef GENERATIONAL_GC
  1675. #define update_hashtable_invalid false
  1676. #else
  1677. #define update_hashtable_invalid true
  1678. #endif
  1679. #define update_unrealloc true
  1680. #define update_fpointer_invalid false
  1681. #define update_fsubr_function false
  1682. #define update_ht_invalid set_ht_invalid_if_needed
  1683. #define update_ss_unrealloc mark_sstring_clean
  1684. #define update_in_unrealloc mark_inst_clean
  1685. #define update_fp_invalid mark_fp_invalid
  1686. #define update_fs_function(ptr)
  1687. update_varobjects();
  1688. #undef update_fs_function
  1689. #undef update_fp_invalid
  1690. #undef update_in_unrealloc
  1691. #undef update_ss_unrealloc
  1692. #undef update_ht_invalid
  1693. #undef update_fsubr_function
  1694. #undef update_fpointer_invalid
  1695. #undef update_unrealloc
  1696. #undef update_hashtable_invalid
  1697. #undef update_page
  1698. #ifdef GENERATIONAL_GC
  1699. /* update pointers in the objects of the old generation: */
  1700. if (generation > 0)
  1701. update_old_generation();
  1702. #endif
  1703. #ifdef MORRIS_GC
  1704. /* finally, the conses are relocated and simultaneously, all
  1705. pointers to them (at present, maintained in lists!) are updated. */
  1706. for_each_cons_page_reversed(page, { gc_morris2(page); } );
  1707. #endif
  1708. inside_gc = false;
  1709. #ifdef MORRIS_GC
  1710. for_each_cons_page(page, { gc_morris3(page); } );
  1711. #endif
  1712. /* now, all active objects are provided with correct content (all
  1713. pointers within point to the correct addresses after the GC).
  1714. The active two-pointer-objects are already at the right location and
  1715. unmarked; the objects of variable length are still at the old
  1716. location and marked, if active.
  1717. Second SWEEP-phase:
  1718. The objects of variable length are moved to the previously
  1719. calculated new locations. */
  1720. #if !defined(GENERATIONAL_GC)
  1721. #ifdef SPVW_MIXED
  1722. for_each_varobject_page(page, { gc_sweep2_varobject_page(page); } );
  1723. #else /* SPVW_PURE */
  1724. for_each_varobject_page(page, { gc_sweep2_varobject_page(page,heapnr); } );
  1725. #endif
  1726. #else /* defined(GENERATIONAL_GC) */
  1727. {
  1728. var uintL heapnr;
  1729. for (heapnr=0; heapnr<heapcount; heapnr++) {
  1730. var Heap* heap = &mem.heaps[heapnr];
  1731. if (!is_unused_heap(heapnr)) {
  1732. if (is_varobject_heap(heapnr)) {
  1733. #ifdef SPVW_MIXED
  1734. gc_sweep2_varobject_page(&heap->pages);
  1735. #else /* SPVW_PURE */
  1736. gc_sweep2_varobject_page(&heap->pages,heapnr);
  1737. #endif
  1738. }
  1739. if (generation == 0) {
  1740. /* The remainder forms the new generation 0. */
  1741. #ifdef SPVW_MIXED_BLOCKS_OPPOSITE
  1742. if (is_cons_heap(heapnr)) {
  1743. var aint start = heap->heap_start;
  1744. heap->heap_gen0_start = start;
  1745. start = start & -physpagesize;
  1746. heap->heap_start = heap->heap_gen1_end = start;
  1747. } else
  1748. #endif
  1749. {
  1750. var aint end = heap->heap_end;
  1751. heap->heap_gen0_end = end;
  1752. end = (end + (physpagesize-1)) & -physpagesize;
  1753. #if varobjects_misaligned
  1754. if (is_varobject_heap(heapnr)) {
  1755. end += varobjects_misaligned;
  1756. if (heap->heap_limit < end) {
  1757. if (end - heap->heap_limit > varobjects_misaligned)
  1758. abort();
  1759. heap->heap_limit = end;
  1760. }
  1761. }
  1762. #endif
  1763. heap->heap_gen1_start = heap->heap_end = end;
  1764. }
  1765. build_old_generation_cache(heapnr);
  1766. } else
  1767. rebuild_old_generation_cache(heapnr);
  1768. }
  1769. #ifdef SPVW_MIXED_BLOCKS_OPPOSITE
  1770. if (is_cons_heap(heapnr))
  1771. heap->heap_end = heap->heap_gen0_end;
  1772. else
  1773. #endif
  1774. heap->heap_start = heap->heap_gen0_start;
  1775. }
  1776. }
  1777. #endif
  1778. /* Now, all active objects are provided with correct content,
  1779. at the right location, and unmarked again. */
  1780. #ifdef SPVW_PAGES
  1781. {
  1782. var uintL heapnr;
  1783. for (heapnr=0; heapnr<heapcount; heapnr++) {
  1784. var Pages* heapptr = &mem.heaps[heapnr].inuse;
  1785. AVL_map(*heapptr,page,
  1786. page->page_room -= page->page_end;);
  1787. /* the available space is now stored in page_room again.
  1788. sort pages according to the available space: */
  1789. *heapptr = AVL(AVLID,sort)(*heapptr);
  1790. }
  1791. }
  1792. for_each_cons_heap(heap, { heap->lastused = dummy_lastused; } );
  1793. /* treat .reserve?? */
  1794. #endif
  1795. CHECK_AVL_CONSISTENCY();
  1796. CHECK_GC_CONSISTENCY();
  1797. CHECK_GC_UNMARKED();
  1798. CHECK_NULLOBJ();
  1799. CHECK_GC_CACHE();
  1800. CHECK_GC_GENERATIONAL();
  1801. SAVE_GC_DATA();
  1802. CHECK_PACK_CONSISTENCY();
  1803. /* end of Garbage Collection. */
  1804. #ifdef HAVE_VADVISE
  1805. begin_system_call();
  1806. vadvise(VA_NORM); /* paging behaviour becomes normal again */
  1807. end_system_call();
  1808. #endif
  1809. inc_gc_count(); /* count GCs */
  1810. /* detect occupied memory: */
  1811. #ifdef SPVW_PAGES
  1812. recalc_space(false);
  1813. #endif
  1814. gcend_space = used_space();
  1815. #ifdef SPVW_PAGES
  1816. mem.last_gcend_space = gcend_space;
  1817. /* we let the used space grow by up to 25%; only then
  1818. is the next GC triggered: */
  1819. {
  1820. var uintM total_room = floor(mem.last_gcend_space,4);
  1821. if (total_room < 512*1024) { total_room = 512*1024; } /* at least 512 KB */
  1822. mem.gctrigger_space = mem.last_gcend_space + total_room;
  1823. }
  1824. #endif
  1825. #ifdef SPVW_MIXED_BLOCKS_OPPOSITE
  1826. /* make_space() expects, that mem.total_room <= length of the big gap. */
  1827. #define set_total_room(space_used_now) \
  1828. { set_total_room_(space_used_now); \
  1829. if (mem.total_room > mem.conses.heap_start-mem.varobjects.heap_end) \
  1830. mem.total_room = mem.conses.heap_start-mem.varobjects.heap_end; \
  1831. }
  1832. #else
  1833. #define set_total_room set_total_room_
  1834. #endif
  1835. #if (defined(SPVW_PURE_BLOCKS) || defined(TRIVIALMAP_MEMORY)) && !defined(GENERATIONAL_GC)
  1836. /* we let the used space grow by up to 50%, only then
  1837. the next GC is triggered: */
  1838. #define set_total_room_(space_used_now) \
  1839. { mem.total_room = floor(space_used_now,2); /* 50% of the now used space */ \
  1840. if (mem.total_room < 512*1024) { mem.total_room = 512*1024; } /* at least 512 KB */ \
  1841. }
  1842. set_total_room(gcend_space);
  1843. #endif
  1844. #if defined(GENERATIONAL_GC)
  1845. /* we let the used space grow up to 25%, only then
  1846. the next GC is triggered: */
  1847. #define set_total_room_(space_used_now) \
  1848. { mem.total_room = floor(space_used_now,4); /* 25% of the now used space */ \
  1849. if (mem.total_room < 512*1024) { mem.total_room = 512*1024; } /* at least 512 KB */ \
  1850. }
  1851. {
  1852. var uintM gen0_sum = 0; /* current size of the old generation */
  1853. var uintM gen1_sum = 0; /* current size of the new generation */
  1854. for_each_heap(heap, {
  1855. gen0_sum += heap->heap_gen0_end - heap->heap_gen0_start;
  1856. });
  1857. #ifdef SPVW_MIXED_BLOCKS_OPPOSITE
  1858. gen1_sum += mem.varobjects.heap_end - mem.varobjects.heap_gen1_start;
  1859. gen1_sum += mem.conses.heap_gen1_end - mem.conses.heap_start;
  1860. #else
  1861. for_each_heap(heap, {
  1862. gen1_sum += heap->heap_end - heap->heap_gen1_start;
  1863. });
  1864. #endif
  1865. /* NB: gcend_space == gen0_sum + gen1_sum. */
  1866. set_total_room(gen0_sum);
  1867. mem.last_gcend_space0 = gen0_sum;
  1868. mem.last_gcend_space1 = gen1_sum;
  1869. }
  1870. #endif
  1871. {
  1872. var uintM freed = gcstart_space - gcend_space; /* freed memory by this GC */
  1873. inc_gc_space(freed); /* add this to the 64-Bit-Accu gc_space */
  1874. }
  1875. #ifdef SPVW_PAGES
  1876. free_some_unused_pages();
  1877. #endif
  1878. #if (defined(SINGLEMAP_MEMORY) || defined(TRIVIALMAP_MEMORY)) && defined(VIRTUAL_MEMORY) && defined(HAVE_MUNMAP)
  1879. /* free unused, empty pages, so that they do not have to be moved
  1880. by the OS to the swap space: */
  1881. begin_system_call();
  1882. #ifndef SPVW_MIXED_BLOCKS_OPPOSITE
  1883. for_each_heap(heap, {
  1884. var aint needed_limit = round_up(heap->heap_end,map_pagesize);
  1885. if (needed_limit > round_up(heap->heap_limit,map_pagesize))
  1886. abort();
  1887. if (needed_limit < heap->heap_limit) {
  1888. if (munmap((void*)needed_limit,heap->heap_limit-needed_limit) < 0)
  1889. goto munmap_failure;
  1890. heap->heap_limit = needed_limit;
  1891. }
  1892. });
  1893. #else /* SPVW_MIXED_BLOCKS_OPPOSITE */
  1894. for_each_heap(heap, {
  1895. if (is_cons_heap(heapnr)) {
  1896. var aint needed_limit = round_down(heap->heap_start,map_pagesize);
  1897. if (needed_limit < heap->heap_limit)
  1898. abort();
  1899. if (needed_limit > heap->heap_limit) {
  1900. if (munmap((void*)heap->heap_limit,needed_limit-heap->heap_limit) < 0)
  1901. goto munmap_failure;
  1902. heap->heap_limit = needed_limit;
  1903. }
  1904. } else {
  1905. var aint needed_limit = round_up(heap->heap_end,map_pagesize);
  1906. if (needed_limit > round_up(heap->heap_limit,map_pagesize))
  1907. abort();
  1908. if (needed_limit < heap->heap_limit) {
  1909. if (munmap((void*)needed_limit,heap->heap_limit-needed_limit) < 0)
  1910. goto munmap_failure;
  1911. heap->heap_limit = needed_limit;
  1912. }
  1913. }
  1914. });
  1915. #endif
  1916. if (false) {
  1917. munmap_failure:
  1918. end_system_call();
  1919. fputs(GETTEXTL("munmap() failed."),stderr);
  1920. errno_out(OS_errno);
  1921. abort();
  1922. }
  1923. end_system_call();
  1924. #endif
  1925. /* add time used by this GC to the GC-total-time: */
  1926. gc_timer_off();
  1927. #ifdef GENERATIONAL_GC
  1928. O(gc_count) = fixnum_inc(O(gc_count),1); /* count GCs */
  1929. #endif
  1930. gc_signalblock_off(); /* release signals again */
  1931. clr_break_sem_1(); /* allow BREAK again */
  1932. }
  1933. /* end of one Garbage Collection.
  1934. can trigger GC! */
/* Post-GC work that calls back into Lisp and may allocate:
   close the files found dead by the mark phase, then run the pending
   finalizers collected during GC.
   can trigger GC! (funcall runs arbitrary Lisp code) */
local maygc void gar_col_done (void)
{
  #ifdef GC_CLOSES_FILES
  close_some_files(O(files_to_close)); /* close previously unmarked files */
  O(files_to_close) = NIL;
  #endif
  /* perform finalizer-functions: */
  while (!(eq(O(pending_finalizers),Fixnum_0))) {
    var object obj = O(pending_finalizers);
    /* NOTE(review): obj is unlinked from the list and all its slots are
       read before funcall(), presumably because funcall can trigger a GC
       that would move obj — confirm against the finalizer protocol. */
    O(pending_finalizers) = TheFinalizer(obj)->fin_cdr;
    pushSTACK(TheFinalizer(obj)->fin_trigger);
    if (!boundp(TheFinalizer(obj)->fin_alive)) { /* (FUNCALL function trigger) */
      funcall(TheFinalizer(obj)->fin_function,1);
    } else { /* (FUNCALL function trigger alive) */
      pushSTACK(TheFinalizer(obj)->fin_alive);
      funcall(TheFinalizer(obj)->fin_function,2);
    }
  }
}
  1954. #ifdef SPVW_PAGES
/* a little sorting-routine: instantiate the generic sorter from sort.c
   for arrays of Pages, keyed by the byte count stashed in page_gcpriv.l: */
#define SORTID spvw
#define SORT_ELEMENT Pages
#define SORT_KEY uintM
#define SORT_KEYOF(page) (page)->page_gcpriv.l
#define SORT_COMPARE(key1,key2) (sintL)((key1)-(key2))
#define SORT_LESS(key1,key2) ((key1) < (key2))
#include "sort.c"
#undef SORT_LESS
#undef SORT_COMPARE
#undef SORT_KEYOF
#undef SORT_KEY
#undef SORT_ELEMENT
#undef SORTID
/* list of pages, that have to be freed, as soon as the update
   is completed (they must survive until pointers into them are fixed up): */
local var Page* delayed_pages = NULL;
/* insertion of a page in this list (links through page_gcpriv.next): */
#define free_page_later(page) \
  { (page)->page_gcpriv.next = delayed_pages; delayed_pages = page; }
/* release of all pages in the list: */
#define free_delayed_pages() \
  { var Page* page = delayed_pages; \
    while (page!=NULL) { \
      var Page* next = (Page*)page->page_gcpriv.next; \
      free_page(page); \
      page = next; \
    } \
    delayed_pages = NULL; \
  }
/* compacting of a page by "decanting" into other pages of the same kind:
   copies as many variable-length objects as possible from `page` into
   partially-filled pages of the heap; each moved object's first word is
   overwritten with a mark-bit-tagged forwarding pointer to its new home.
   Objects that could not be copied are assigned a uniform downward shift
   (recorded in page->page_gcpriv.d) to be carried out in the relocation
   phase. */
#ifdef SPVW_PURE
local void gc_compact_from_varobject_page (Heap* heapptr, Page* page, uintL heapnr)
#else
local void gc_compact_from_varobject_page (Heap* heapptr, Page* page)
#endif
{
  var aint p1 = page->page_start;
  var aint p1end = page->page_end;
  var_prepare_objsize;
  {
    var Pages new_page = EMPTY; /* Page, which is being filled */
    var AVL(AVLID,stack) stack; /* path from the root to the page */
    var aint p2; /* cache of new_page->page_end */
    var uintM l2; /* cache of new_page->page_room */
    /* try to copy all objects between p1 and p1end : */
    while (1) {
      if (p1==p1end) /* upper bound reached -> finished */
        break;
      var uintM laenge = objsize((Varobject)p1); /* determine byte-length */
      /* search a page, that has still 'laenge' free bytes: */
      if ((new_page == EMPTY) || (l2 < laenge)) {
        if (new_page != EMPTY) { /* flush cached end/room back into the page */
          new_page->page_end = p2;
          new_page->page_room = l2;
          AVL(AVLID,move)(&stack);
        }
        new_page = AVL(AVLID,least)(laenge,&heapptr->inuse,&stack);
        if (new_page==EMPTY) /* no page with enough room -> stop copying */
          break;
        new_page->page_gcpriv.d = -1L; /* mark new_page as "to be filled" */
        p2 = new_page->page_end;
        l2 = new_page->page_room;
      }
      var aint old_p1 = p1;
      var aint old_p2 = p2;
      /* copy the object (advances p1 and p2 by laenge): */
      l2 -= laenge; move_aligned_p1_p2(laenge);
      /* leave a pointer to the new position: */
      *(gcv_object_t*)old_p1 = with_mark_bit(pointer_as_object(old_p2));
      /* p1 = source address for the next object */
    }
    if (new_page != EMPTY) { /* flush cached end/room back into the page */
      new_page->page_end = p2;
      new_page->page_room = l2;
      AVL(AVLID,move)(&stack);
    }
  }
  /* the not copied objects experience a constant shift downward: */
  {
    var aint p2 = page->page_start;
    page->page_gcpriv.d = p1 - p2; /* shift */
    page->page_start = p1; /* current start of the page */
    if (p1!=p2) /* if shift is necessary */
      while (p1!=p1end) { /* upper bound reached -> finished */
        var uintM laenge = objsize((Varobject)p1); /* calculate byte-length */
        #ifdef TYPECODES
        var tint flags = mtypecode(((Varobject)p1)->GCself); /* save typeinfo (and flags for symbols) */
        #endif
        set_GCself(p1, flags,p2); /* store new address, with old typeinfo */
        mark(p1); /* with mark bit */
        p1 += laenge; p2 += laenge;
      }
  }
}
/* Decant cons cells (two-pointer objects) from `page` into other pages of
   the same heap, working downward from page_end. Each moved cell leaves a
   mark-bit-tagged forwarding pointer at its old location; cells that could
   not be moved simply stay put (no shift is needed for conses). */
local void gc_compact_from_cons_page (Heap* heapptr, Page* page)
{
  var aint p1 = page->page_end;
  var aint p1start = page->page_start;
  {
    var Pages new_page = EMPTY; /* page, which is filled */
    var AVL(AVLID,stack) stack; /* path from the root to the page */
    var aint p2; /* cache of new_page->page_end */
    var uintM l2; /* cache of new_page->page_room */
    /* try to copy all objects between p1start and p1: */
    while (1) {
      if (p1==p1start) /* lower bound reached -> finished */
        break;
      /* search a page, that has at least sizeof(cons_) bytes free: */
      if ((new_page == EMPTY) || (l2 == 0)) { /* l2 < sizeof(cons_) means l2 = 0 */
        if (new_page != EMPTY) { /* flush cached end/room back into the page */
          new_page->page_end = p2;
          new_page->page_room = l2;
          AVL(AVLID,move)(&stack);
        }
        new_page = AVL(AVLID,least)(sizeof(cons_),&heapptr->inuse,&stack);
        if (new_page==EMPTY) /* no page with room left -> stop copying */
          break;
        new_page->page_gcpriv.d = -1L; /* mark new_page as "to be filled" */
        p2 = new_page->page_end;
        l2 = new_page->page_room;
      }
      p1 -= sizeof(cons_); /* p1 = source address for next object */
      /* copy the object (both pointer words): */
      ((gcv_object_t*)p2)[0] = ((gcv_object_t*)p1)[0];
      ((gcv_object_t*)p2)[1] = ((gcv_object_t*)p1)[1];
      /* leave a pointer to the new position: */
      *(gcv_object_t*)p1 = with_mark_bit(pointer_as_object(p2));
      p2 += sizeof(cons_); l2 -= sizeof(cons_);
    }
    if (new_page != EMPTY) { /* flush cached end/room back into the page */
      new_page->page_end = p2;
      new_page->page_room = l2;
      AVL(AVLID,move)(&stack);
    }
  }
  /* the not copied objects remain on the spot. */
  page->page_gcpriv.d = page->page_end - p1; /* gain */
  page->page_end = p1; /* current end of the page */
}
/* compacting of all pages of a certain kind:
   empties the most sparsely occupied pages of *heapptr into the fuller
   ones, then frees (or pools) the pages that became completely empty.
   heaptype==0 selects the cons-cell decanting routine, otherwise the
   variable-length-object routine. */
#ifdef SPVW_PURE
local void gc_compact_heap (Heap* heapptr, sintB heaptype, uintL heapnr)
#else
local void gc_compact_heap (Heap* heapptr, sintB heaptype)
#endif
{
  /* first, create a list of all pages, sorted ascending
     according to the number of occupied bytes: */
  var uintL pagecount = 0;
  map_heap(*heapptr,page,
           { page->page_gcpriv.l = page->page_end - page->page_start; /* number of occupied bytes */
             pagecount++;
           });
  /* pagecount = number of pages. */
  var DYNAMIC_ARRAY(pages_sorted,Pages,pagecount);
  {
    var uintL index = 0;
    map_heap(*heapptr,page, { pages_sorted[index++] = page; } );
  }
  /* pages_sorted = Array of pages. */
  SORT(spvw,sort)(pages_sorted,pagecount);
  /* pages_sorted = Array of pages, sorted according to number
     of occupied bytes.
     In each page, page_gcpriv.d means the shift downwards,
     that must occur to the page in Phase 3 (>=0).
     page_gcpriv.d = -1L for the pages to be filled.
     page_gcpriv.d = -2L for the yet untreated pages. */
  map_heap(*heapptr,page, { page->page_gcpriv.d = -2L; } ); /* all pages still untreated */
  {
    var uintL index;
    for (index=0; index<pagecount; index++) { /* peruse all pages, emptiest first */
      var Pages page = pages_sorted[index]; /* next page */
      if (page->page_gcpriv.d == -2L) {
        /* still untreated and not yet marked as "to be filled"?
           page is being emptied. */
        heapptr->inuse = AVL(AVLID,delete1)(page,heapptr->inuse); /* take out page */
        /* empty page: */
        if (heaptype==0)
          gc_compact_from_cons_page(heapptr,page);
        else
          #ifdef SPVW_PURE
          gc_compact_from_varobject_page(heapptr,page,heapnr);
          #else
          gc_compact_from_varobject_page(heapptr,page);
          #endif
      }
    }
  }
  CHECK_AVL_CONSISTENCY();
  CHECK_GC_CONSISTENCY_2();
  {
    var uintL index;
    for (index=0; index<pagecount; index++) { /* peruse all pages */
      var Pages page = pages_sorted[index]; /* next page */
      if (page->page_gcpriv.d != -1L) { /* a page to be emptied */
        page->page_room += page->page_gcpriv.d; /* room, we have created now */
        if (page->page_start == page->page_end) {
          /* page completely emptied
             free page: */
          if (page->m_length > min_page_size_brutto) {
            /* ultralarge page */
            free_page_later(page); /* return to OS later */
          } else {
            /* normal large page
               keep; page->page_room remains the same!
               insert into the pool mem.free_pages: */
            page->page_gcpriv.next = mem.free_pages;
            mem.free_pages = page;
          }
        } else {
          /* Page could not be emptied entirely */
          heapptr->inuse = AVL(AVLID,insert1)(page,heapptr->inuse); /* insert page again */
        }
      }
    }
  }
  FREE_DYNAMIC_ARRAY(pages_sorted);
  CHECK_AVL_CONSISTENCY();
  CHECK_GC_CONSISTENCY_2();
}
/* perform compacting Garbage Collection.
   Is called, after gar_col_simple() could not get sufficient room
   in one piece.
   Note: This function does not garbage collect anything; it only reorganizes
   the existing objects in fewer pages. Therefore it does not need to be
   wrapped in with_gc_statistics() calls like do_gar_col_simple and do_gar_col. */
local maygc void gar_col_compact (void)
{
  /* Lisp-objects from almost empty pages are filled into other pages,
     in order to return those now empty pages.
     1. For each kind of page:
        divide pages in pages to be emptied and pages to be filled and
        copy as many data as possible from the to be emptied pages into
        the pages to be filled. If a page cannot be emptied entirely,
        leave it as it is, and within it move the remaining data
        just downwards.
        return of the completely empty pages.
     2. update of pointers.
     3. execution of the relocations into the not entirely emptied pages. */
  set_break_sem_1(); /* disable BREAK during Garbage Collection */
  gc_signalblock_on(); /* disable signals during Garbage Collection */
  gc_timer_on();
  CHECK_GC_UNMARKED(); CHECK_NULLOBJ();
  inside_gc = true;
  {
    var uintL heapnr;
    for (heapnr=0; heapnr<heapcount; heapnr++)
      if (!is_unused_heap(heapnr))
        #ifdef SPVW_PURE
        gc_compact_heap(&mem.heaps[heapnr],mem.heaptype[heapnr],heapnr);
        #endif
        #ifdef SPVW_MIXED
        /* NOTE(review): 1-heapnr maps the heap number to the heaptype
           argument (0 = conses) — assumes the mixed model's heap numbering;
           confirm against the heap layout definitions. */
        gc_compact_heap(&mem.heaps[heapnr],1-heapnr);
        #endif
  }
  /* update phase:
     The entire LISP-memory is perused and old addresses
     are replaced with new ones.
     peruse all LISP-objects and update:
     Update pointers in the LISP-stacks: */
  update_STACKs();
  /* Update pointers in the C stacks: */
  update_back_traces();
  /* Update program constants: */
  update_tables();
  /* Update pointers in the cons-cells: */
  #define update_conspage update_conspage_normal
  update_conses();
  #undef update_conspage
  /* update pointers in the objects of variable length: */
  #define update_page(page,updater) \
    { var aint ptr = page->page_start; \
      var aint ptrend = page->page_end; \
      /* peruse all objects with address >=ptr, <ptrend : */ \
      while (ptr!=ptrend) { /* until ptr has reached the end */ \
        /* peruse next object with address ptr (< ptrend) : */ \
        updater(typecode_at(ptr) & ~bit(garcol_bit_t)); /* and advance */ \
      } \
    }
  #ifdef GENERATIONAL_GC
  #define update_hashtable_invalid false
  #else
  #define update_hashtable_invalid true
  #endif
  #define update_unrealloc false
  #define update_fpointer_invalid false
  #define update_fsubr_function false
  #define update_ht_invalid set_ht_invalid_if_needed
  #define update_ss_unrealloc(ptr)
  #define update_in_unrealloc(ptr)
  #define update_fp_invalid mark_fp_invalid
  #define update_fs_function(ptr)
  update_varobjects();
  #undef update_fs_function
  #undef update_fp_invalid
  #undef update_in_unrealloc
  #undef update_ss_unrealloc
  #undef update_ht_invalid
  #undef update_fsubr_function
  #undef update_fpointer_invalid
  #undef update_unrealloc
  #undef update_hashtable_invalid
  #undef update_page
  /* execution of the relocations in the not entirely emptied pages: */
  for_each_varobject_page(page, {
    if (page->page_gcpriv.d != -1L) {
      var aint p1 = page->page_start;
      var aint p1end = page->page_end;
      var aint p2 = p1 - page->page_gcpriv.d;
      if (p1!=p2) { /* if relocation is necessary */
        var_prepare_objsize;
        page->page_start = p2;
        while (p1!=p1end) { /* upper bound reached -> finished */
          /* next object has address p1, is marked */
          unmark(p1); /* delete mark */
          /* retain object and relocate: */
          var uintM count = objsize((Varobject)p1); /* length (divisible by varobject_alignment, >0) */
          move_aligned_p1_p2(count); /* relocate and advance p1, p2 */
        }
        page->page_end = p2;
      }
    }
  });
  for_each_cons_heap(heap, { heap->lastused = dummy_lastused; } );
  recalc_space(true);
  free_delayed_pages(); /* return fully emptied pages to the OS */
  free_some_unused_pages();
  inside_gc = false;
  CHECK_AVL_CONSISTENCY();
  CHECK_GC_CONSISTENCY();
  CHECK_GC_UNMARKED(); CHECK_NULLOBJ();
  CHECK_PACK_CONSISTENCY();
  gc_timer_off();
  gc_signalblock_off(); /* release signals again */
  clr_break_sem_1(); /* allow BREAK again */
}
  2292. #endif
/* perform Garbage Collection:
   decides between a normal collection (plus optional compaction under
   SPVW_PAGES) and, under GENERATIONAL_GC, between a full and a
   new-generation-only collection; then runs the post-GC actions. */
local maygc void gar_col_simple (void);
local void do_gar_col_simple (void)
{
  #ifdef NOCOST_SP_CHECK
  /* Better flag a stack overflow before GC than during GC. (If the
     stack overflow handler is called during GC, a crash is unavoidable.) */
  if (near_SP_overflow()) SP_ueber();
  #endif
  #if !defined(GENERATIONAL_GC)
  gar_col_normal();
  #ifdef SPVW_PAGES
  #if defined(UNIX) || defined(WIN32)
  /* if the allocated, but unoccupied memory in pages
     comprises more than 25% of what is occupied, compacting
     is worthwhile, because a half-empty page costs the
     operating system just as much as a full page: */
  if (free_space() > floor(mem.last_gcend_space,4)) {
    gar_col_compact(); mem.last_gc_compacted = true;
  } else
  #endif
    mem.last_gc_compacted = false;
  #endif
  #else /* defined(GENERATIONAL_GC) */
  /* If after the last GC the objects in the new generation
     amount to more than 25% of the objects in the old generation,
     then we will perform a full Garbage-Collection this time (both
     generations at once.) */
  if (mem.last_gcend_space1 > floor(mem.last_gcend_space0,4)) {
    generation = 0; gar_col_normal(); mem.last_gc_full = true;
  } else {
    generation = 1; gar_col_normal(); mem.last_gc_full = false;
  }
  #endif
  gar_col_done(); /* close dead files, run finalizers */
}
  2329. local maygc void gar_col_simple()
  2330. {
  2331. var uintC saved_mv_count = mv_count; /* save mv_count */
  2332. with_gc_statistics(&do_gar_col_simple); /* GC and statistics */
  2333. mv_count = saved_mv_count; /* restore mv_count */
  2334. }
  2335. /* perform full Garbage Collection: */
  2336. global maygc void gar_col (int level);
/* Driver for a full garbage collection (all generations, with page
   compacting where applicable). Must be invoked through
   with_gc_statistics() (see gar_col below). */
local void do_gar_col (void)
{
#ifdef NOCOST_SP_CHECK
/* Better flag a stack overflow before GC than during GC. (If the
stack overflow handler is called during GC, a crash is unavoidable.) */
if (near_SP_overflow()) SP_ueber();
#endif
#if !defined(GENERATIONAL_GC)
gar_col_normal();
#ifdef SPVW_PAGES
/* a full GC always compacts the pages, unconditionally
   (unlike do_gar_col_simple, which applies a 25% heuristic): */
gar_col_compact(); mem.last_gc_compacted = true;
#endif
#else /* defined(GENERATIONAL_GC) */
/* a full GC always collects both generations at once: */
generation = 0; gar_col_normal(); mem.last_gc_full = true;
#endif
gar_col_done(); /* common post-GC bookkeeping */
}
  2354. global maygc void gar_col(int level)
  2355. {
  2356. var uintC saved_mv_count = mv_count; /* save mv_count */
  2357. #if defined(USE_JITC)
  2358. gc_drop_jitc = (level==1);
  2359. #endif
  2360. with_gc_statistics(&do_gar_col); /* GC and statistics */
  2361. #if defined(USE_JITC)
  2362. gc_drop_jitc = false;
  2363. #endif
  2364. mv_count = saved_mv_count; /* restore mv_count */
  2365. }
  2366. /* Macro update is now unnecessary: */
  2367. #undef update_stackobj
  2368. #undef update
  2369. #if defined(SPVW_MIXED_BLOCKS_OPPOSITE) && RESERVE
  2370. /* For Reorganization of the object memory after GC or before and after EXECUTE:
  2371. sub-program for relocation of the conses.
  2372. move_conses(delta);
  2373. the reserve memory is shrinked by delta bytes (divisible by
  2374. varobject_alignment), the conses are shifted upwards by delta bytes. */
/* Shift the entire cons region by delta bytes (divisible by
   varobject_alignment) and then rewrite every pointer to a cons cell in the
   whole LISP world (stacks, C backtraces, program tables, cons pages,
   variable-length objects) to account for the move.
   Positive delta moves the conses upwards, negative delta downwards.
   Runs with BREAK and signals disabled; the elapsed time is charged to
   the GC timer. */
local void move_conses (sintM delta)
{
if (delta==0) /* no relocation necessary? */
return;
set_break_sem_1(); /* disable BREAK */
gc_signalblock_on(); /* disable signals */
gc_timer_on();
if (delta>0) {
/* shift upwards, from above (copy back-to-front so the source and
   destination regions may overlap without corruption) */
var gcv_object_t* source = (gcv_object_t*) mem.conses.heap_end;
var gcv_object_t* source_end = (gcv_object_t*) mem.conses.heap_start;
#if !(defined(MIPS) && !defined(GNU))
var gcv_object_t* dest = (gcv_object_t*) (mem.conses.heap_end += delta);
#else /* circumvent IRIX 4 "cc -ansi" compiler-bug?? */
var gcv_object_t* dest = (mem.conses.heap_end += delta, (gcv_object_t*)mem.conses.heap_end);
#endif
mem.conses.heap_start += delta;
while (source!=source_end) {
*--dest = *--source; /* copy an entire cons upwards */
*--dest = *--source;
}
} else { /* delta<0 */
/* shift downwards, from below (front-to-back copy is safe here
   because the destination lies below the source) */
var gcv_object_t* source = (gcv_object_t*) mem.conses.heap_start;
var gcv_object_t* source_end = (gcv_object_t*) mem.conses.heap_end;
#if !(defined(MIPS) && !defined(GNU))
var gcv_object_t* dest = (gcv_object_t*) (mem.conses.heap_start += delta);
#else /* circumvent IRIX 4 "cc -ansi" compiler-bug?? */
var gcv_object_t* dest = (mem.conses.heap_start += delta, (gcv_object_t*)mem.conses.heap_start);
#endif
mem.conses.heap_end += delta;
while (source!=source_end) {
*dest++ = *source++; /* copy an entire cons downwards */
*dest++ = *source++;
}
}
/* update pointers to conses and similar: */
{
/* the byte displacement expressed in oint units, so it can be added
   directly to the oint representation of a pointer */
var soint odelta = (soint)delta<<(oint_addr_shift-addr_shift); /* Offset in the oint */
/* The entire LISP-memory is perused and old addresses
are replaced with new ones.
update of an object *objptr : */
#ifdef TYPECODES
#define update(objptr) \
{ switch (mtypecode(*(gcv_object_t*)(objptr))) { \
case_pair: /* Two-Pointer-Object? */ \
*(gcv_object_t*)(objptr) = as_object(as_oint(*(gcv_object_t*)(objptr)) + odelta); \
break; \
default: break; \
}}
#else
#define update(objptr) \
{ if (consp(*(gcv_object_t*)(objptr))) \
*(gcv_object_t*)(objptr) = as_object(as_oint(*(gcv_object_t*)(objptr)) + odelta); \
}
#endif
/* peruse all LISP-objects and update:
Update pointers in all LISP-stacks: */
#define update_stackobj update_stackobj_normal
update_STACKs();
#undef update_stackobj
/* Update pointers in all C stacks: */
update_back_traces();
/* Update program constants: */
update_tables();
/* update pointers in the Cons-cells: */
#define update_conspage update_conspage_normal
update_conses();
#undef update_conspage
/* update pointers in the objects of variable length: */
#define update_page update_page_normal
#ifdef GENERATIONAL_GC
#define update_hashtable_invalid false
#else
#define update_hashtable_invalid true
#endif
#define update_unrealloc false
#define update_fpointer_invalid false
#define update_fsubr_function false
#define update_ht_invalid set_ht_invalid_if_needed
#define update_ss_unrealloc(ptr)
#define update_in_unrealloc(ptr)
#define update_fp_invalid mark_fp_invalid
#define update_fs_function(ptr)
update_varobjects();
/* tear down the update_varobjects() macro protocol again,
   in reverse order of definition: */
#undef update_fs_function
#undef update_fp_invalid
#undef update_in_unrealloc
#undef update_ss_unrealloc
#undef update_ht_invalid
#undef update_fsubr_function
#undef update_fpointer_invalid
#undef update_unrealloc
#undef update_hashtable_invalid
#undef update_page
/* Macro update is now unnecessary: */
#undef update
}
/* End of relocation and update.
add needed time to GC-total-time: */
gc_timer_off();
gc_signalblock_off(); /* release signals again */
clr_break_sem_1(); /* allow BREAK again */
}
  2479. #endif