summaryrefslogtreecommitdiff
path: root/indra/llcommon/llmemory.h
diff options
context:
space:
mode:
Diffstat (limited to 'indra/llcommon/llmemory.h')
-rw-r--r--    indra/llcommon/llmemory.h    77
1 files changed, 54 insertions, 23 deletions
diff --git a/indra/llcommon/llmemory.h b/indra/llcommon/llmemory.h
index 2704a495e0..41023b4ba4 100644
--- a/indra/llcommon/llmemory.h
+++ b/indra/llcommon/llmemory.h
@@ -111,6 +111,16 @@ public: \
void operator delete(void* ptr) \
{ \
ll_aligned_free_16(ptr); \
+ } \
+ \
+ void* operator new[](size_t size) \
+ { \
+ return ll_aligned_malloc_16(size); \
+ } \
+ \
+ void operator delete[](void* ptr) \
+ { \
+ ll_aligned_free_16(ptr); \
}
@@ -126,8 +136,9 @@ public: \
#else
inline void* ll_aligned_malloc_fallback( size_t size, int align )
{
+ LL_PROFILE_ZONE_SCOPED;
#if defined(LL_WINDOWS)
- return _aligned_malloc(size, align);
+ void* ret = _aligned_malloc(size, align);
#else
char* aligned = NULL;
void* mem = malloc( size + (align - 1) + sizeof(void*) );
@@ -138,12 +149,16 @@ public: \
((void**)aligned)[-1] = mem;
}
- return aligned;
+ void* ret = aligned;
#endif
+ LL_PROFILE_ALLOC(ret, size);
+ return ret;
}
inline void ll_aligned_free_fallback( void* ptr )
{
+ LL_PROFILE_ZONE_SCOPED;
+ LL_PROFILE_FREE(ptr);
#if defined(LL_WINDOWS)
_aligned_free(ptr);
#else
@@ -159,21 +174,24 @@ public: \
inline void* ll_aligned_malloc_16(size_t size) // returned hunk MUST be freed with ll_aligned_free_16().
{
+ LL_PROFILE_ZONE_SCOPED;
#if defined(LL_WINDOWS)
- return _aligned_malloc(size, 16);
+ void* ret = _aligned_malloc(size, 16);
#elif defined(LL_DARWIN)
- return malloc(size); // default osx malloc is 16 byte aligned.
+ void* ret = malloc(size); // default osx malloc is 16 byte aligned.
#else
- void *rtn;
- if (LL_LIKELY(0 == posix_memalign(&rtn, 16, size)))
- return rtn;
- else // bad alignment requested, or out of memory
- return NULL;
+ void *ret;
+ if (0 != posix_memalign(&ret, 16, size))
+ return nullptr;
#endif
+ LL_PROFILE_ALLOC(ret, size);
+ return ret;
}
inline void ll_aligned_free_16(void *p)
{
+ LL_PROFILE_ZONE_SCOPED;
+ LL_PROFILE_FREE(p);
#if defined(LL_WINDOWS)
_aligned_free(p);
#elif defined(LL_DARWIN)
@@ -185,10 +203,12 @@ inline void ll_aligned_free_16(void *p)
inline void* ll_aligned_realloc_16(void* ptr, size_t size, size_t old_size) // returned hunk MUST be freed with ll_aligned_free_16().
{
+ LL_PROFILE_ZONE_SCOPED;
+ LL_PROFILE_FREE(ptr);
#if defined(LL_WINDOWS)
- return _aligned_realloc(ptr, size, 16);
+ void* ret = _aligned_realloc(ptr, size, 16);
#elif defined(LL_DARWIN)
- return realloc(ptr,size); // default osx malloc is 16 byte aligned.
+ void* ret = realloc(ptr,size); // default osx malloc is 16 byte aligned.
#else
//FIXME: memcpy is SLOW
void* ret = ll_aligned_malloc_16(size);
@@ -201,27 +221,31 @@ inline void* ll_aligned_realloc_16(void* ptr, size_t size, size_t old_size) // r
}
ll_aligned_free_16(ptr);
}
- return ret;
#endif
+    LL_PROFILE_ALLOC(ret, size);
+ return ret;
}
inline void* ll_aligned_malloc_32(size_t size) // returned hunk MUST be freed with ll_aligned_free_32().
{
+ LL_PROFILE_ZONE_SCOPED;
#if defined(LL_WINDOWS)
- return _aligned_malloc(size, 32);
+ void* ret = _aligned_malloc(size, 32);
#elif defined(LL_DARWIN)
- return ll_aligned_malloc_fallback( size, 32 );
+ void* ret = ll_aligned_malloc_fallback( size, 32 );
#else
- void *rtn;
- if (LL_LIKELY(0 == posix_memalign(&rtn, 32, size)))
- return rtn;
- else // bad alignment requested, or out of memory
- return NULL;
+ void *ret;
+ if (0 != posix_memalign(&ret, 32, size))
+ return nullptr;
#endif
+ LL_PROFILE_ALLOC(ret, size);
+ return ret;
}
inline void ll_aligned_free_32(void *p)
{
+ LL_PROFILE_ZONE_SCOPED;
+ LL_PROFILE_FREE(p);
#if defined(LL_WINDOWS)
_aligned_free(p);
#elif defined(LL_DARWIN)
@@ -235,29 +259,35 @@ inline void ll_aligned_free_32(void *p)
template<size_t ALIGNMENT>
LL_FORCE_INLINE void* ll_aligned_malloc(size_t size)
{
+ LL_PROFILE_ZONE_SCOPED;
+ void* ret;
if (LL_DEFAULT_HEAP_ALIGN % ALIGNMENT == 0)
{
- return malloc(size);
+ ret = malloc(size);
+ LL_PROFILE_ALLOC(ret, size);
}
else if (ALIGNMENT == 16)
{
- return ll_aligned_malloc_16(size);
+ ret = ll_aligned_malloc_16(size);
}
else if (ALIGNMENT == 32)
{
- return ll_aligned_malloc_32(size);
+ ret = ll_aligned_malloc_32(size);
}
else
{
- return ll_aligned_malloc_fallback(size, ALIGNMENT);
+ ret = ll_aligned_malloc_fallback(size, ALIGNMENT);
}
+ return ret;
}
template<size_t ALIGNMENT>
LL_FORCE_INLINE void ll_aligned_free(void* ptr)
{
+ LL_PROFILE_ZONE_SCOPED;
if (ALIGNMENT == LL_DEFAULT_HEAP_ALIGN)
{
+ LL_PROFILE_FREE(ptr);
free(ptr);
}
else if (ALIGNMENT == 16)
@@ -279,6 +309,7 @@ LL_FORCE_INLINE void ll_aligned_free(void* ptr)
//
inline void ll_memcpy_nonaliased_aligned_16(char* __restrict dst, const char* __restrict src, size_t bytes)
{
+ LL_PROFILE_ZONE_SCOPED;
assert(src != NULL);
assert(dst != NULL);
assert(bytes > 0);