
/MicroFrameworkPK_v4_2/CLR/Core/GarbageCollector_Compaction.cpp

https://github.com/pmfsampaio/NETMF-LPC
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Copyright (c) Microsoft Corporation. All rights reserved.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

#include "Core.h"

////////////////////////////////////////////////////////////////////////////////////////////////////

CLR_UINT32 CLR_RT_GarbageCollector::ExecuteCompaction()
{
    NATIVE_PROFILE_CLR_CORE();

#if defined(TINYCLR_PROFILE_NEW_ALLOCATIONS)
    g_CLR_PRF_Profiler.RecordHeapCompactionBegin();
#endif

#if defined(TINYCLR_TRACE_MEMORY_STATS)
    if(s_CLR_RT_fTrace_MemoryStats >= c_CLR_RT_Trace_Info)
    {
        CLR_Debug::Printf( "GC: performing heap compaction...\r\n" );
    }
#endif

    ////////////////////////////////////////////////////////////////////////////////////////////////

    CLR_RT_ExecutionEngine::ExecutionConstraint_Suspend();

    Heap_Compact();

    CLR_RT_ExecutionEngine::ExecutionConstraint_Resume();

    m_numberOfCompactions++;

    ////////////////////////////////////////////////////////////////////////////////////////////////

#if defined(TINYCLR_PROFILE_NEW_ALLOCATIONS)
    g_CLR_PRF_Profiler.RecordHeapCompactionEnd();
#endif

    return 0;
}
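ExecuteCompaction brackets the actual work between ExecutionConstraint_Suspend and ExecutionConstraint_Resume, presumably so that no execution-constraint checks fire while heap blocks are being moved, and it counts each run in m_numberOfCompactions. A minimal, self-contained sketch of that suspend/compact/resume bracketing, using an RAII guard and hypothetical stand-in names (an illustration of the pattern, not the NETMF API):

    #include <cstdio>

    // Hypothetical stand-ins for the suspend/resume calls used by ExecuteCompaction.
    static void ExecutionConstraint_Suspend() { std::puts("constraints suspended"); }
    static void ExecutionConstraint_Resume () { std::puts("constraints resumed");   }

    // RAII guard: suspend on entry, resume on every exit path.
    struct ConstraintGuard
    {
        ConstraintGuard()  { ExecutionConstraint_Suspend(); }
        ~ConstraintGuard() { ExecutionConstraint_Resume();  }
    };

    static unsigned s_numberOfCompactions = 0;

    static unsigned ExecuteCompactionSketch()
    {
        ConstraintGuard guard;          // nothing constraint-checked runs while blocks move

        // ... Heap_Compact() would do the actual sliding here ...

        s_numberOfCompactions++;
        return 0;
    }

    int main()
    {
        ExecuteCompactionSketch();
        std::printf("compactions so far: %u\n", s_numberOfCompactions);
        return 0;
    }

A guard object keeps the resume unconditional on every exit path; the real routine simply calls the two functions explicitly around Heap_Compact().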
////////////////////////////////////////////////////////

void CLR_RT_GarbageCollector::Heap_Compact()
{
    NATIVE_PROFILE_CLR_CORE();
    ValidatePointers();

    //--//

    RelocationRegion relocHelper[ c_minimumSpaceForCompact ];
    const size_t     relocMax = ARRAYSIZE(relocHelper);

    Heap_Relocate_Prepare( relocHelper, relocMax );

    RelocationRegion* relocBlocks  = relocHelper;
    RelocationRegion* relocCurrent = relocBlocks;

    //--//

    TestPointers_PopulateOld();

    CLR_RT_HeapCluster*    freeRegion_hc = NULL;
    CLR_RT_HeapBlock_Node* freeRegion    = NULL;

    CLR_RT_HeapCluster* currentSource_hc = (CLR_RT_HeapCluster*)g_CLR_RT_ExecutionEngine.m_heap.FirstNode();
    while(currentSource_hc->Next())
    {
        CLR_RT_HeapBlock_Node* currentSource     = currentSource_hc->m_payloadStart;
        CLR_RT_HeapBlock_Node* currentSource_end = currentSource_hc->m_payloadEnd;

        if(!freeRegion)
        {
            //
            // Move to the next free region.
            //
            freeRegion_hc = (CLR_RT_HeapCluster*)g_CLR_RT_ExecutionEngine.m_heap.FirstNode();
            while(true)
            {
                CLR_RT_HeapCluster* freeRegion_hcNext = (CLR_RT_HeapCluster*)freeRegion_hc->Next(); if(!freeRegion_hcNext) break;

                freeRegion = freeRegion_hc->m_freeList.FirstNode(); if(freeRegion->Next()) break;

                freeRegion    = NULL;
                freeRegion_hc = freeRegion_hcNext;
            }

            if(!freeRegion) break;
        }

        while(true)
        {
            //
            // We can only move backward.
            //
            if(currentSource < freeRegion)
            {
                currentSource_hc  = freeRegion_hc;
                currentSource     = freeRegion;
                currentSource_end = freeRegion_hc->m_payloadEnd;
            }

            //
            // Skip unmovable blocks.
            //
            while(currentSource < currentSource_end && currentSource->IsFlagSet( CLR_RT_HeapBlock::HB_Unmovable ))
            {
                currentSource += currentSource->DataSize();
            }

            if(currentSource == currentSource_end) break;

            //////////////////////////////////////////////////////
            //
            // At this point, we have at least ONE movable block.
            //
            //////////////////////////////////////////////////////

#if TINYCLR_VALIDATE_HEAP >= TINYCLR_VALIDATE_HEAP_4_CompactionPlus
            if(IsBlockInFreeList( g_CLR_RT_ExecutionEngine.m_heap, freeRegion, true ) == false)
            {
                CLR_Debug::Printf( "'freeRegion' is not in a free list!! %08x\r\n", freeRegion );

                TINYCLR_DEBUG_STOP();
            }

            if(IsBlockInFreeList( g_CLR_RT_ExecutionEngine.m_heap, currentSource, false ))
            {
                CLR_Debug::Printf( "'currentSource' is in a free list!! %08x\r\n", currentSource );

                TINYCLR_DEBUG_STOP();
            }
#endif

            //
            // The relocation table is full: apply the recorded moves now and start a new batch.
            //
            if(m_relocCount >= relocMax)
            {
                ValidateHeap( g_CLR_RT_ExecutionEngine.m_heap );

                Heap_Relocate();

                ValidateHeap( g_CLR_RT_ExecutionEngine.m_heap );

                relocBlocks  = m_relocBlocks;
                relocCurrent = relocBlocks;

                TestPointers_PopulateOld();
            }

            {
                CLR_UINT32 move            = 0;
                CLR_UINT32 freeRegion_Size = freeRegion->DataSize();
                bool       fSlide;

                relocCurrent->m_destination = (CLR_UINT8*)freeRegion;
                relocCurrent->m_start       = (CLR_UINT8*)currentSource;
                relocCurrent->m_offset      = (CLR_UINT32)(relocCurrent->m_destination - relocCurrent->m_start);

                //
                // Are the free block and the first movable block adjacent?
                //
                if(currentSource == freeRegion + freeRegion_Size)
                {
                    //
                    // Adjacent: the whole run of movable blocks simply slides back over the free region.
                    //
                    while(currentSource < currentSource_end && currentSource->IsFlagSet( CLR_RT_HeapBlock::HB_Unmovable ) == false)
                    {
                        CLR_UINT32 len = currentSource->DataSize();

                        currentSource += len;
                        move          += len;
                    }

                    fSlide = true;
                }
                else
                {
                    //
                    // Not adjacent: copy only as many whole blocks as fit in the free region.
                    //
                    while(freeRegion_Size && currentSource < currentSource_end && currentSource->IsFlagSet( CLR_RT_HeapBlock::HB_Unmovable ) == false)
                    {
                        CLR_UINT32 len = currentSource->DataSize();

                        if(freeRegion_Size < len)
                        {
                            break;
                        }

                        freeRegion_Size -= len;
                        currentSource   += len;
                        move            += len;
                    }

                    fSlide = false;
                }

                if(move)
                {
                    //
                    // Skip forward to the next movable block.
                    //
                    while(currentSource < currentSource_end && currentSource->IsFlagSet( CLR_RT_HeapBlock::HB_Unmovable ))
                    {
                        currentSource += currentSource->DataSize();
                    }

                    CLR_UINT32 moveBytes = move * sizeof(*currentSource);

                    relocCurrent->m_end = relocCurrent->m_start + moveBytes;

                    //--//

                    //
                    // Remove the old free block, copy the data, recreate the new free block.
                    // Merge with the following one if they are adjacent now.
                    //
                    CLR_RT_HeapBlock_Node* freeRegionNext = freeRegion->Next();

                    freeRegion->Unlink();

                    memmove( relocCurrent->m_destination, relocCurrent->m_start, moveBytes );

                    if(freeRegion_Size)
                    {
                        freeRegion = freeRegion_hc->InsertInOrder( freeRegion + move, freeRegion_Size );
                    }
                    else
                    {
                        freeRegion = freeRegionNext;
                    }

                    if(fSlide == false)
                    {
                        //
                        // The vacated source range becomes a new free block.
                        //
                        CLR_RT_HeapBlock_Node* dst = currentSource_hc->InsertInOrder( (CLR_RT_HeapBlock_Node*)relocCurrent->m_start, move );

                        if(dst < freeRegion && freeRegion < (dst + dst->DataSize()))
                        {
                            freeRegion = dst;
                        }
                    }

                    CLR_RT_GarbageCollector::ValidateCluster( currentSource_hc );
                    CLR_RT_GarbageCollector::ValidateCluster( freeRegion_hc    );

                    relocCurrent++;
                    m_relocCount++;
                }
                else
                {
                    freeRegion = freeRegion->Next();
                }

                if(freeRegion->Next() == NULL)
                {
                    freeRegion    = NULL;
                    freeRegion_hc = (CLR_RT_HeapCluster*)freeRegion_hc->Next();

                    while(true)
                    {
                        CLR_RT_HeapCluster* freeRegion_hcNext = (CLR_RT_HeapCluster*)freeRegion_hc->Next(); if(!freeRegion_hcNext) break;

                        freeRegion = freeRegion_hc->m_freeList.FirstNode(); if(freeRegion->Next()) break;

                        freeRegion    = NULL;
                        freeRegion_hc = freeRegion_hcNext;
                    }

                    if(!freeRegion) break;
                }
            }
        }

        currentSource_hc = (CLR_RT_HeapCluster*)currentSource_hc->Next();
    }

    if(m_relocCount)
    {
        ValidateHeap( g_CLR_RT_ExecutionEngine.m_heap );

        Heap_Relocate();

        ValidateHeap( g_CLR_RT_ExecutionEngine.m_heap );
    }
}
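Heap_Compact walks the heap cluster by cluster, slides runs of movable blocks backward into earlier free regions, and records every move in the bounded RelocationRegion table so the later pointer-fixup pass can adjust references. Stripped of the unmovable-block handling, the per-cluster free lists, and the table-overflow flushes, the core slide-and-record idea can be sketched on a flat array of variable-size blocks (Cell, RelocEntry, and CompactSketch are hypothetical illustration types, not the real heap structures):

    #include <cstdint>
    #include <cstring>
    #include <cstdio>
    #include <vector>

    // Hypothetical illustration only: a heap of fixed-size cells where each block
    // starts with a header giving its length (in cells) and whether it is free.
    struct Cell { uint32_t len; bool free; uint8_t payload[8]; };

    struct RelocEntry { size_t start, end, dest; };   // cell indices; mirrors RelocationRegion

    // Slide every live block down over the free space before it, recording the moves.
    static std::vector<RelocEntry> CompactSketch( std::vector<Cell>& heap )
    {
        std::vector<RelocEntry> moves;
        size_t dst = 0;                                // next free destination
        size_t src = 0;

        while(src < heap.size())
        {
            size_t len = heap[ src ].len;

            if(!heap[ src ].free)
            {
                if(src != dst)
                {
                    moves.push_back( { src, src + len, dst } );
                    std::memmove( &heap[ dst ], &heap[ src ], len * sizeof(Cell) );
                }
                dst += len;
            }
            src += len;
        }

        // Everything from 'dst' onward is now one free region.
        if(dst < heap.size())
        {
            heap[ dst ].len  = (uint32_t)(heap.size() - dst);
            heap[ dst ].free = true;
        }
        return moves;
    }

    int main()
    {
        // blocks: live(2) free(3) live(1) free(1) live(2)
        std::vector<Cell> heap(9);
        heap[0] = { 2, false }; heap[2] = { 3, true  }; heap[5] = { 1, false };
        heap[6] = { 1, true  }; heap[7] = { 2, false };

        for(const RelocEntry& m : CompactSketch( heap ))
            std::printf("moved [%zu,%zu) -> %zu\n", m.start, m.end, m.dest);
        return 0;
    }

The real routine additionally distinguishes the "slide" case (source blocks adjacent to the free region) from the copy-what-fits case, and re-inserts the vacated space into the cluster's free list.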
void CLR_RT_GarbageCollector::Heap_Relocate_Prepare( RelocationRegion* blocks, size_t total )
{
    NATIVE_PROFILE_CLR_CORE();
    m_relocBlocks = blocks;
    m_relocTotal  = total;
    m_relocCount  = 0;
}

void CLR_RT_GarbageCollector::Heap_Relocate_AddBlock( CLR_UINT8* dst, CLR_UINT8* src, CLR_UINT32 length )
{
    NATIVE_PROFILE_CLR_CORE();
    RelocationRegion* reloc = m_relocBlocks;
    size_t            count = m_relocCount;

    while(count)
    {
        if(reloc->m_start > src)
        {
            //
            // Insert region, so they are sorted by start address.
            //
            memmove( &reloc[ 1 ], &reloc[ 0 ], count * sizeof(*reloc) );

            break;
        }

        reloc++;
        count--;
    }

    reloc->m_start       = src;
    reloc->m_end         = &src[ length ];
    reloc->m_destination = dst;
    reloc->m_offset      = (CLR_UINT32)(dst - src);

    if(++m_relocCount == m_relocTotal)
    {
        Heap_Relocate();
    }
}
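Heap_Relocate_AddBlock keeps the relocation table sorted by source start address: it scans for the first entry whose start lies above the new source, opens a hole with memmove, and writes the new (start, end, destination, offset) record there; when the table fills up (m_relocCount == m_relocTotal) it flushes by running Heap_Relocate immediately. A small self-contained sketch of that sorted, fixed-capacity insert (Region, g_table, and AddRegion are hypothetical names for illustration):

    #include <cstddef>
    #include <cstring>
    #include <cstdio>

    struct Region { const char* start; const char* end; ptrdiff_t offset; };

    static Region g_table[ 8 ];
    static size_t g_count = 0;

    // Insert keeping the table sorted by 'start'; mirrors the memmove shift above.
    // (The real code flushes the table by running Heap_Relocate when it fills up.)
    static void AddRegion( const char* dst, const char* src, size_t length )
    {
        Region* slot  = g_table;
        size_t  after = g_count;

        while(after)
        {
            if(slot->start > src)
            {
                // Open a hole here; everything at or after 'slot' slides up by one.
                std::memmove( &slot[ 1 ], &slot[ 0 ], after * sizeof(*slot) );
                break;
            }
            slot++;
            after--;
        }

        slot->start  = src;
        slot->end    = src + length;
        slot->offset = dst - src;
        g_count++;
    }

    int main()
    {
        char heap[ 64 ] = {};

        AddRegion( &heap[ 0 ], &heap[ 32 ], 8 );   // later source first
        AddRegion( &heap[ 8 ], &heap[ 16 ], 8 );   // earlier source second

        for(size_t i = 0; i < g_count; i++)
            std::printf("region %zu: src offset %td\n", i, g_table[ i ].start - heap);
        return 0;
    }

The sorted order is what allows Heap_Relocate( void** ref ) further down to binary-search the table instead of scanning it.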
void CLR_RT_GarbageCollector::Heap_Relocate()
{
    NATIVE_PROFILE_CLR_CORE();
    if(m_relocCount)
    {
        RelocationRegion* relocBlocks  = m_relocBlocks;
        CLR_UINT8*        relocMinimum = relocBlocks->m_start;
        CLR_UINT8*        relocMaximum = relocBlocks->m_end;

        for(size_t i=0; i<m_relocCount; i++, relocBlocks++)
        {
            if(relocMinimum > relocBlocks->m_start) relocMinimum = relocBlocks->m_start;
            if(relocMaximum < relocBlocks->m_end  ) relocMaximum = relocBlocks->m_end;
        }

        m_relocMinimum = relocMinimum;
        m_relocMaximum = relocMaximum;

        TestPointers_Remap();

        Heap_Relocate_Pass( NULL );

#if defined(TINYCLR_PROFILE_NEW_ALLOCATIONS)
        g_CLR_PRF_Profiler.TrackObjectRelocation();
#endif

        ValidatePointers();

        TestPointers_PopulateNew();

        m_relocCount = 0;
    }
}
void CLR_RT_GarbageCollector::Heap_Relocate_Pass( RelocateFtn ftn )
{
    NATIVE_PROFILE_CLR_CORE();
#if TINYCLR_VALIDATE_HEAP > TINYCLR_VALIDATE_HEAP_0_None
    m_relocWorker = ftn;
#endif

    TINYCLR_FOREACH_NODE(CLR_RT_HeapCluster,hc,g_CLR_RT_ExecutionEngine.m_heap)
    {
        CLR_RT_HeapBlock_Node* ptr = hc->m_payloadStart;
        CLR_RT_HeapBlock_Node* end = hc->m_payloadEnd;

        while(ptr < end)
        {
            CLR_RT_HEAPBLOCK_RELOCATE(ptr);

            ptr += ptr->DataSize();
        }
    }
    TINYCLR_FOREACH_NODE_END();

    g_CLR_RT_ExecutionEngine.Relocate();
}
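Heap_Relocate_Pass optionally installs a per-reference worker (the normal compaction path passes NULL; heap-validation builds can pass a checking function such as Relocation_JustCheck at the bottom of this file) and then visits every block in every cluster before asking the execution engine to relocate its own roots. A self-contained sketch of that check-only worker dispatch; the RelocateFtn signature shown here is an assumption, since the real typedef lives in a header not shown in this file:

    #include <cstdio>

    // Hypothetical signature: a worker that receives each reference slot during a pass.
    typedef bool (*RelocateFtn)( void** ref );

    static RelocateFtn g_relocWorker = nullptr;

    // Validation-style worker: only inspect the reference, never change it.
    static bool JustCheck( void** ref )
    {
        std::printf("visiting slot %p, target %p\n", (void*)ref, *ref);
        return true;
    }

    // Per-reference hook, shaped like Heap_Relocate( void** ref ) below.
    static void RelocateRef( void** ref )
    {
        if(g_relocWorker)
        {
            g_relocWorker( ref );       // check-only pass
            return;
        }
        // ... normal path: range-check and binary-search the relocation table ...
    }

    int main()
    {
        int   object = 42;
        void* slot   = &object;

        g_relocWorker = JustCheck;      // run a pass that only validates references
        RelocateRef( &slot );

        g_relocWorker = nullptr;        // subsequent passes would really remap
        RelocateRef( &slot );
        return 0;
    }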
//--//

void CLR_RT_GarbageCollector::Heap_Relocate( CLR_RT_HeapBlock* lst, CLR_UINT32 len )
{
    NATIVE_PROFILE_CLR_CORE();
    while(len--)
    {
        CLR_RT_HEAPBLOCK_RELOCATE(lst);

        lst++;
    }
}
void CLR_RT_GarbageCollector::Heap_Relocate( void** ref )
{
    NATIVE_PROFILE_CLR_CORE();
    CLR_UINT8* dst = (CLR_UINT8*)*ref;

#if TINYCLR_VALIDATE_HEAP > TINYCLR_VALIDATE_HEAP_0_None
    if(g_CLR_RT_GarbageCollector.m_relocWorker)
    {
        g_CLR_RT_GarbageCollector.m_relocWorker( ref );
    }
    else
#endif
    {
        if(dst >= g_CLR_RT_GarbageCollector.m_relocMinimum && dst < g_CLR_RT_GarbageCollector.m_relocMaximum)
        {
            RelocationRegion* relocBlocks = g_CLR_RT_GarbageCollector.m_relocBlocks;
            size_t            left        = 0;
            size_t            right       = g_CLR_RT_GarbageCollector.m_relocCount;

            while(left < right)
            {
                size_t            center       = (left + right) / 2;
                RelocationRegion& relocCurrent = relocBlocks[ center ];

                if(dst < relocCurrent.m_start)
                {
                    right = center;
                }
                else if(dst >= relocCurrent.m_end)
                {
                    left = center+1;
                }
                else
                {
                    *ref = (void*)(dst + relocCurrent.m_offset);

                    return;
                }
            }
        }
    }
}
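Heap_Relocate( void** ref ) is the per-reference fixup: it first rejects pointers outside the [m_relocMinimum, m_relocMaximum) range, then binary-searches the start-sorted relocation table and, on a hit, adds the region's precomputed offset to the reference. The same lookup in a self-contained form (Region and Remap are hypothetical illustration names):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Mirrors RelocationRegion: a moved source range plus the signed displacement to its new home.
    struct Region { const char* start; const char* end; intptr_t offset; };

    // Binary-search the start-sorted table; adjust *ref only if it lies inside a moved region.
    static void Remap( void** ref, const Region* table, size_t count )
    {
        const char* p = (const char*)*ref;
        size_t left = 0, right = count;

        while(left < right)
        {
            size_t        center = (left + right) / 2;
            const Region& r      = table[ center ];

            if     (p <  r.start) right = center;
            else if(p >= r.end  ) left  = center + 1;
            else                  { *ref = (void*)((intptr_t)p + r.offset); return; }
        }
        // Not inside any moved region: the reference stays as it is.
    }

    int main()
    {
        char old_heap[ 32 ], new_heap[ 32 ];

        // One recorded move: old_heap[8..16) relocated to new_heap[0..8).
        Region table[ 1 ];
        table[ 0 ].start  = &old_heap[ 8 ];
        table[ 0 ].end    = &old_heap[ 16 ];
        table[ 0 ].offset = (intptr_t)&new_heap[ 0 ] - (intptr_t)&old_heap[ 8 ];

        void* ref = &old_heap[ 10 ];
        Remap( &ref, table, 1 );

        std::printf("remapped to new_heap+%td\n", (char*)ref - new_heap);   // prints 2
        return 0;
    }

Keeping the table sorted in Heap_Relocate_AddBlock is what makes this lookup logarithmic per reference instead of a linear scan over all recorded moves.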
#if TINYCLR_VALIDATE_HEAP >= TINYCLR_VALIDATE_HEAP_3_Compaction

bool CLR_RT_GarbageCollector::Relocation_JustCheck( void** ref )
{
    NATIVE_PROFILE_CLR_CORE();
    CLR_UINT8* dst = (CLR_UINT8*)*ref;

    if(dst)
    {
        ValidateBlockNotInFreeList( g_CLR_RT_ExecutionEngine.m_heap, (CLR_RT_HeapBlock_Node*)dst );
    }

    return true;
}

#endif