PageRenderTime 42ms CodeModel.GetById 10ms app.highlight 25ms RepoModel.GetById 1ms app.codeStats 0ms

/indra/llcommon/llmemory.h

https://bitbucket.org/lindenlab/viewer-beta/
C++ Header | 515 lines | 381 code | 80 blank | 54 comment | 12 complexity | ca116ddeddd7f2408f1016e31beaa265 MD5 | raw file
  1/**
  2 * @file llmemory.h
  3 * @brief Memory allocation/deallocation header-stuff goes here.
  4 *
  5 * $LicenseInfo:firstyear=2002&license=viewerlgpl$
  6 * Second Life Viewer Source Code
  7 * Copyright (C) 2010, Linden Research, Inc.
  8 *
  9 * This library is free software; you can redistribute it and/or
 10 * modify it under the terms of the GNU Lesser General Public
 11 * License as published by the Free Software Foundation;
 12 * version 2.1 of the License only.
 13 *
 14 * This library is distributed in the hope that it will be useful,
 15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 17 * Lesser General Public License for more details.
 18 *
 19 * You should have received a copy of the GNU Lesser General Public
 20 * License along with this library; if not, write to the Free Software
 21 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 22 *
 23 * Linden Research, Inc., 945 Battery Street, San Francisco, CA  94111  USA
 24 * $/LicenseInfo$
 25 */
 26#ifndef LLMEMORY_H
 27#define LLMEMORY_H
 28
 29#include "llmemtype.h"
 30#if LL_DEBUG
 31inline void* ll_aligned_malloc( size_t size, int align )
 32{
 33	void* mem = malloc( size + (align - 1) + sizeof(void*) );
 34	char* aligned = ((char*)mem) + sizeof(void*);
 35	aligned += align - ((uintptr_t)aligned & (align - 1));
 36
 37	((void**)aligned)[-1] = mem;
 38	return aligned;
 39}
 40
 41inline void ll_aligned_free( void* ptr )
 42{
 43	free( ((void**)ptr)[-1] );
 44}
 45
// 16-byte-aligned allocation. Pair strictly with ll_aligned_free_16():
// each platform branch uses a different underlying allocator.
inline void* ll_aligned_malloc_16(size_t size) // returned hunk MUST be freed with ll_aligned_free_16().
{
#if defined(LL_WINDOWS)
	return _mm_malloc(size, 16);	// SSE aligned allocator; must be released with _mm_free()
#elif defined(LL_DARWIN)
	return malloc(size); // default osx malloc is 16 byte aligned.
#else
	void *rtn;
	if (LL_LIKELY(0 == posix_memalign(&rtn, 16, size)))
		return rtn;
	else // bad alignment requested, or out of memory
		return NULL;
#endif
}
 60
 61inline void ll_aligned_free_16(void *p)
 62{
 63#if defined(LL_WINDOWS)
 64	_mm_free(p);
 65#elif defined(LL_DARWIN)
 66	return free(p);
 67#else
 68	free(p); // posix_memalign() is compatible with heap deallocator
 69#endif
 70}
 71
// 32-byte-aligned allocation (e.g. for AVX-sized data). Pair strictly with
// ll_aligned_free_32(): each platform branch uses a different allocator.
inline void* ll_aligned_malloc_32(size_t size) // returned hunk MUST be freed with ll_aligned_free_32().
{
#if defined(LL_WINDOWS)
	return _mm_malloc(size, 32);
#elif defined(LL_DARWIN)
	return ll_aligned_malloc( size, 32 );	// osx malloc is only 16-byte aligned, so over-allocate and adjust
#else
	void *rtn;
	if (LL_LIKELY(0 == posix_memalign(&rtn, 32, size)))
		return rtn;
	else // bad alignment requested, or out of memory
		return NULL;
#endif
}
 86
// Release memory obtained from ll_aligned_malloc_32().
inline void ll_aligned_free_32(void *p)
{
#if defined(LL_WINDOWS)
	_mm_free(p);
#elif defined(LL_DARWIN)
	ll_aligned_free( p );	// must undo the ll_aligned_malloc() wrapper used on osx
#else
	free(p); // posix_memalign() is compatible with heap deallocator
#endif
}
#else // LL_DEBUG
// ll_aligned_foo are noops now that we use tcmalloc everywhere (tcmalloc aligns automatically at appropriate intervals)
// NOTE(review): plain malloc() only guarantees fundamental alignment
// (typically 16 bytes); the 32-byte variants rely entirely on the tcmalloc
// claim above -- confirm the allocator before depending on 32-byte alignment.
#define ll_aligned_malloc( size, align ) malloc(size)
#define ll_aligned_free( ptr ) free(ptr)
#define ll_aligned_malloc_16 malloc
#define ll_aligned_free_16 free
#define ll_aligned_malloc_32 malloc
#define ll_aligned_free_32 free
#endif // LL_DEBUG

// When non-zero, private-pool allocations are tracked together with the
// requesting function/line (see LLPrivateMemoryPoolManager::allocate).
#ifndef __DEBUG_PRIVATE_MEM__
#define __DEBUG_PRIVATE_MEM__  0
#endif
110
// Static utility class for process-wide memory bookkeeping: holds an
// emergency reserve block, caches memory statistics, and watches the heap
// against a configured ceiling. All state is static; never instantiated.
// Implementations live in llmemory.cpp.
class LL_COMMON_API LLMemory
{
public:
	static void initClass();	// presumably allocates 'reserveMem' -- confirm in llmemory.cpp
	static void cleanupClass();
	static void freeReserve();	// hand the reserve back (used under memory pressure)
	// Return the resident set size of the current process, in bytes.
	// Return value is zero if not known.
	static U64 getCurrentRSS();
	static U32 getWorkingSetSize();
	// Try to reserve 'size' bytes at 'address'; semantics are platform
	// specific -- see the definition in llmemory.cpp.
	static void* tryToAlloc(void* address, U32 size);
	// Configure the heap ceiling (in GB) and whether to actively try to
	// prevent heap allocation failure.
	static void initMaxHeapSizeGB(F32 max_heap_size_gb, BOOL prevent_heap_failure);
	static void updateMemoryInfo() ;	// refresh the cached statistics below
	static void logMemoryInfo(BOOL update = FALSE);
	static bool isMemoryPoolLow();

	// Cached statistics; call updateMemoryInfo() to refresh.
	static U32 getAvailableMemKB() ;
	static U32 getMaxMemKB() ;
	static U32 getAllocatedMemKB() ;
private:
	static char* reserveMem;	// emergency reserve released by freeReserve()
	static U32 sAvailPhysicalMemInKB ;
	static U32 sMaxPhysicalMemInKB ;
	static U32 sAllocatedMemInKB;
	static U32 sAllocatedPageSizeInKB ;

	static U32 sMaxHeapSizeInKB;
	static BOOL sEnableMemoryFailurePrevention;
};
140
141//----------------------------------------------------------------------------
#if MEM_TRACK_MEM
class LLMutex ;
// Debug-only singleton that records recent allocation call sites
// (function name + line) in a fixed-capacity buffer; preDraw()/getNextLine()
// appear to support iterating the entries for on-screen display.
class LL_COMMON_API LLMemTracker
{
private:
	LLMemTracker() ;
	~LLMemTracker() ;

public:
	static void release() ;	// destroy the singleton
	static LLMemTracker* getInstance() ;

	void track(const char* function, const int line) ;	// record one call site (see MEM_TRACK macro)
	void preDraw(BOOL pause) ;	// begin an iteration pass; 'pause' freezes recording
	void postDraw() ;
	const char* getNextLine() ;	// next recorded entry, presumably NULL/empty at end -- confirm in llmemory.cpp

private:
	static LLMemTracker* sInstance ;
	
	char**     mStringBuffer ;	// mCapacity formatted call-site strings
	S32        mCapacity ;
	U32        mLastAllocatedMem ;
	S32        mCurIndex ;	// write cursor into mStringBuffer
	S32        mCounter;
	S32        mDrawnIndex;	// read cursor used by getNextLine()
	S32        mNumOfDrawn;
	BOOL       mPaused;
	LLMutex*   mMutexp ;
};

#define MEM_TRACK_RELEASE LLMemTracker::release() ;
#define MEM_TRACK         LLMemTracker::getInstance()->track(__FUNCTION__, __LINE__) ;

#else // MEM_TRACK_MEM

// Tracking disabled: the macros expand to nothing.
#define MEM_TRACK_RELEASE
#define MEM_TRACK

#endif // MEM_TRACK_MEM
182
183//----------------------------------------------------------------------------
184
185
186//
187//class LLPrivateMemoryPool defines a private memory pool for an application to use, so the application does not
//need to access the heap directly for each memory allocation. Through this, the allocation speed is faster,
//and it reduces the virtual address space fragmentation problem.
190//Note: this class is thread-safe by passing true to the constructor function. However, you do not need to do this unless
191//you are sure the memory allocation and de-allocation will happen in different threads. To make the pool thread safe
192//increases allocation and deallocation cost.
193//
194class LL_COMMON_API LLPrivateMemoryPool
195{
196	friend class LLPrivateMemoryPoolManager ;
197
198public:
199	class LL_COMMON_API LLMemoryBlock //each block is devided into slots uniformly
200	{
201	public: 
202		LLMemoryBlock() ;
203		~LLMemoryBlock() ;
204
205		void init(char* buffer, U32 buffer_size, U32 slot_size) ;
206		void setBuffer(char* buffer, U32 buffer_size) ;
207
208		char* allocate() ;
209		void  freeMem(void* addr) ;
210
211		bool empty() {return !mAllocatedSlots;}
212		bool isFull() {return mAllocatedSlots == mTotalSlots;}
213		bool isFree() {return !mTotalSlots;}
214
215		U32  getSlotSize()const {return mSlotSize;}
216		U32  getTotalSlots()const {return mTotalSlots;}
217		U32  getBufferSize()const {return mBufferSize;}
218		char* getBuffer() const {return mBuffer;}
219
220		//debug use
221		void resetBitMap() ;
222	private:
223		char* mBuffer;
224		U32   mSlotSize ; //when the block is not initialized, it is the buffer size.
225		U32   mBufferSize ;
226		U32   mUsageBits ;
227		U8    mTotalSlots ;
228		U8    mAllocatedSlots ;
229		U8    mDummySize ; //size of extra bytes reserved for mUsageBits.
230
231	public:
232		LLMemoryBlock* mPrev ;
233		LLMemoryBlock* mNext ;
234		LLMemoryBlock* mSelf ;
235
236		struct CompareAddress
237		{
238			bool operator()(const LLMemoryBlock* const& lhs, const LLMemoryBlock* const& rhs)
239			{
240				return (U32)lhs->getBuffer() < (U32)rhs->getBuffer();
241			}
242		};
243	};
244
245	class LL_COMMON_API LLMemoryChunk //is divided into memory blocks.
246	{
247	public:
248		LLMemoryChunk() ;
249		~LLMemoryChunk() ;
250
251		void init(char* buffer, U32 buffer_size, U32 min_slot_size, U32 max_slot_size, U32 min_block_size, U32 max_block_size) ;
252		void setBuffer(char* buffer, U32 buffer_size) ;
253
254		bool empty() ;
255		
256		char* allocate(U32 size) ;
257		void  freeMem(void* addr) ;
258
259		char* getBuffer() const {return mBuffer;}
260		U32 getBufferSize() const {return mBufferSize;}
261		U32 getAllocatedSize() const {return mAlloatedSize;}
262
263		bool containsAddress(const char* addr) const;
264
265		static U32 getMaxOverhead(U32 data_buffer_size, U32 min_slot_size, 
266													   U32 max_slot_size, U32 min_block_size, U32 max_block_size) ;
267	
268		void dump() ;
269
270	private:
271		U32 getPageIndex(U32 addr) ;
272		U32 getBlockLevel(U32 size) ;
273		U16 getPageLevel(U32 size) ;
274		LLMemoryBlock* addBlock(U32 blk_idx) ;
275		void popAvailBlockList(U32 blk_idx) ;
276		void addToFreeSpace(LLMemoryBlock* blk) ;
277		void removeFromFreeSpace(LLMemoryBlock* blk) ;
278		void removeBlock(LLMemoryBlock* blk) ;
279		void addToAvailBlockList(LLMemoryBlock* blk) ;
280		U32  calcBlockSize(U32 slot_size);
281		LLMemoryBlock* createNewBlock(LLMemoryBlock* blk, U32 buffer_size, U32 slot_size, U32 blk_idx) ;
282
283	private:
284		LLMemoryBlock** mAvailBlockList ;//256 by mMinSlotSize
285		LLMemoryBlock** mFreeSpaceList;
286		LLMemoryBlock*  mBlocks ; //index of blocks by address.
287		
288		char* mBuffer ;
289		U32   mBufferSize ;
290		char* mDataBuffer ;
291		char* mMetaBuffer ;
292		U32   mMinBlockSize ;
293		U32   mMinSlotSize ;
294		U32   mMaxSlotSize ;
295		U32   mAlloatedSize ;
296		U16   mBlockLevels;
297		U16   mPartitionLevels;
298
299	public:
300		//form a linked list
301		LLMemoryChunk* mNext ;
302		LLMemoryChunk* mPrev ;
303	} ;
304
305private:
306	LLPrivateMemoryPool(S32 type, U32 max_pool_size) ;
307	~LLPrivateMemoryPool() ;
308
309	char *allocate(U32 size) ;
310	void  freeMem(void* addr) ;
311	
312	void  dump() ;
313	U32   getTotalAllocatedSize() ;
314	U32   getTotalReservedSize() {return mReservedPoolSize;}
315	S32   getType() const {return mType; }
316	bool  isEmpty() const {return !mNumOfChunks; }
317
318private:
319	void lock() ;
320	void unlock() ;	
321	S32 getChunkIndex(U32 size) ;
322	LLMemoryChunk*  addChunk(S32 chunk_index) ;
323	bool checkSize(U32 asked_size) ;
324	void removeChunk(LLMemoryChunk* chunk) ;
325	U16  findHashKey(const char* addr);
326	void addToHashTable(LLMemoryChunk* chunk) ;
327	void removeFromHashTable(LLMemoryChunk* chunk) ;
328	void rehash() ;
329	bool fillHashTable(U16 start, U16 end, LLMemoryChunk* chunk) ;
330	LLMemoryChunk* findChunk(const char* addr) ;
331
332	void destroyPool() ;
333
334public:
335	enum
336	{
337		SMALL_ALLOCATION = 0, //from 8 bytes to 2KB(exclusive), page size 2KB, max chunk size is 4MB.
338		MEDIUM_ALLOCATION,    //from 2KB to 512KB(exclusive), page size 32KB, max chunk size 4MB
339		LARGE_ALLOCATION,     //from 512KB to 4MB(inclusive), page size 64KB, max chunk size 16MB
340		SUPER_ALLOCATION      //allocation larger than 4MB.
341	};
342
343	enum
344	{
345		STATIC = 0 ,       //static pool(each alllocation stays for a long time) without threading support
346		VOLATILE,          //Volatile pool(each allocation stays for a very short time) without threading support
347		STATIC_THREADED,   //static pool with threading support
348		VOLATILE_THREADED, //volatile pool with threading support
349		MAX_TYPES
350	}; //pool types
351
352private:
353	LLMutex* mMutexp ;
354	U32  mMaxPoolSize;
355	U32  mReservedPoolSize ;	
356
357	LLMemoryChunk* mChunkList[SUPER_ALLOCATION] ; //all memory chunks reserved by this pool, sorted by address	
358	U16 mNumOfChunks ;
359	U16 mHashFactor ;
360
361	S32 mType ;
362
363	class LLChunkHashElement
364	{
365	public:
366		LLChunkHashElement() {mFirst = NULL ; mSecond = NULL ;}
367
368		bool add(LLMemoryChunk* chunk) ;
369		void remove(LLMemoryChunk* chunk) ;
370		LLMemoryChunk* findChunk(const char* addr) ;
371
372		bool empty() {return !mFirst && !mSecond; }
373		bool full()  {return mFirst && mSecond; }
374		bool hasElement(LLMemoryChunk* chunk) {return mFirst == chunk || mSecond == chunk;}
375
376	private:
377		LLMemoryChunk* mFirst ;
378		LLMemoryChunk* mSecond ;
379	};
380	std::vector<LLChunkHashElement> mChunkHashList ;
381};
382
// Singleton factory/registry for private memory pools: creates and deletes
// pools, aggregates reserved/allocated totals, and keeps sDanglingPoolList
// (presumably pools outliving the manager -- confirm in llmemory.cpp).
class LL_COMMON_API LLPrivateMemoryPoolManager
{
private:
	LLPrivateMemoryPoolManager(BOOL enabled, U32 max_pool_size) ;
	~LLPrivateMemoryPoolManager() ;

public:	
	static LLPrivateMemoryPoolManager* getInstance() ;
	static void initClass(BOOL enabled, U32 pool_size) ;	// create the singleton; 'enabled' gates private pooling
	static void destroyClass() ;

	LLPrivateMemoryPool* newPool(S32 type) ;	// 'type' is an LLPrivateMemoryPool pool-type enum value
	void deletePool(LLPrivateMemoryPool* pool) ;

private:	
	std::vector<LLPrivateMemoryPool*> mPoolList ;	
	U32  mMaxPrivatePoolSize;

	static LLPrivateMemoryPoolManager* sInstance ;
	static BOOL sPrivatePoolEnabled;
	static std::vector<LLPrivateMemoryPool*> sDanglingPoolList ;
public:
	//debug and statistics info.
	void updateStatistics() ;	// recompute the two totals below from mPoolList

	U32 mTotalReservedSize ;
	U32 mTotalAllocatedSize ;

public:
#if __DEBUG_PRIVATE_MEM__
	// Debug build: also records the requesting function/line per allocation
	// in sMemAllocationTracker (keyed by the returned address).
	static char* allocate(LLPrivateMemoryPool* poolp, U32 size, const char* function, const int line) ;	
	
	typedef std::map<char*, std::string> mem_allocation_info_t ;
	static mem_allocation_info_t sMemAllocationTracker;
#else
	static char* allocate(LLPrivateMemoryPool* poolp, U32 size) ;	
#endif
	static void  freeMem(LLPrivateMemoryPool* poolp, void* addr) ;
};
422
//-------------------------------------------------------------------------------------
// Allocation entry points. Always use these rather than calling the manager
// directly: the __DEBUG_PRIVATE_MEM__ build captures the caller's
// function/line, and the non-debug build compiles to the plain overload.
#if __DEBUG_PRIVATE_MEM__
#define ALLOCATE_MEM(poolp, size) LLPrivateMemoryPoolManager::allocate((poolp), (size), __FUNCTION__, __LINE__)
#else
#define ALLOCATE_MEM(poolp, size) LLPrivateMemoryPoolManager::allocate((poolp), (size))
#endif
#define FREE_MEM(poolp, addr) LLPrivateMemoryPoolManager::freeMem((poolp), (addr))
//-------------------------------------------------------------------------------------
431
432//
433//the below singleton is used to test the private memory pool.
434//
#if 0
// Dead code (compiled out): exerciser/benchmark for LLPrivateMemoryPool.
// NOTE(review): if re-enabled this needs work -- it calls private members of
// LLPrivateMemoryPool (allocate/freeMem) without being declared a friend,
// and the out-of-class operator definitions below exist only when the inner
// '#if 0' in-class declarations are also enabled.
class LL_COMMON_API LLPrivateMemoryPoolTester
{
private:
	LLPrivateMemoryPoolTester() ;
	~LLPrivateMemoryPoolTester() ;

public:
	static LLPrivateMemoryPoolTester* getInstance() ;
	static void destroy() ;

	void run(S32 type) ;	// exercise a pool of the given pool type

private:
	void correctnessTest() ;
	void performanceTest() ;
	void fragmentationtest() ;

	void test(U32 min_size, U32 max_size, U32 stride, U32 times, bool random_deletion, bool output_statistics) ;
	void testAndTime(U32 size, U32 times) ;

#if 0
public:
	// Route the tester's own allocations through the pool under test.
	void* operator new(size_t size)
	{
		return (void*)sPool->allocate(size) ;
	}
    void  operator delete(void* addr)
	{
		sPool->freeMem(addr) ;
	}
	void* operator new[](size_t size)
	{
		return (void*)sPool->allocate(size) ;
	}
    void  operator delete[](void* addr)
	{
		sPool->freeMem(addr) ;
	}
#endif

private:
	static LLPrivateMemoryPoolTester* sInstance;
	static LLPrivateMemoryPool* sPool ;
	static LLPrivateMemoryPool* sThreadedPool ;
};
#if 0
//static
void* LLPrivateMemoryPoolTester::operator new(size_t size)
{
	return (void*)sPool->allocate(size) ;
}

//static
void  LLPrivateMemoryPoolTester::operator delete(void* addr)
{
	sPool->freeMem(addr) ;	// was sPool->free(addr): LLPrivateMemoryPool declares freeMem(), not free()
}

//static
void* LLPrivateMemoryPoolTester::operator new[](size_t size)
{
	return (void*)sPool->allocate(size) ;
}

//static
void  LLPrivateMemoryPoolTester::operator delete[](void* addr)
{
	sPool->freeMem(addr) ;	// was sPool->free(addr): LLPrivateMemoryPool declares freeMem(), not free()
}
#endif
#endif
507// LLRefCount moved to llrefcount.h
508
509// LLPointer moved to llpointer.h
510
511// LLSafeHandle moved to llsafehandle.h
512
513// LLSingleton moved to llsingleton.h
514
515#endif