Diffstat (limited to 'indra/llcommon/llmemory.h')
 indra/llcommon/llmemory.h | 40 ++++++++++++++++++++++++++++------------
 1 file changed, 28 insertions(+), 12 deletions(-)
diff --git a/indra/llcommon/llmemory.h b/indra/llcommon/llmemory.h
index 4480e381e8..4ead45679f 100644
--- a/indra/llcommon/llmemory.h
+++ b/indra/llcommon/llmemory.h
@@ -27,14 +27,18 @@
 #define LLMEMORY_H
 
 #include "linden_common.h"
+#if !LL_WINDOWS
+#include <stdint.h>
+#endif
 
 class LLMutex ;
 
-#ifdef LL_WINDOWS
-#define LL_ALIGNED(x) __declspec(align(x))
+#if LL_WINDOWS && LL_DEBUG
+#define LL_CHECK_MEMORY llassert(_CrtCheckMemory());
 #else
-#define LL_ALIGNED(x) __attribute__ ((aligned (16)))
+#define LL_CHECK_MEMORY
 #endif
+
 inline void* ll_aligned_malloc( size_t size, int align )
 {
 	void* mem = malloc( size + (align - 1) + sizeof(void*) );
@@ -66,31 +70,43 @@ inline void* ll_aligned_malloc_16(size_t size) // returned hunk MUST be freed wi
 #endif
 }
 
-inline void* ll_aligned_realloc_16(void* ptr, size_t size) // returned hunk MUST be freed with ll_aligned_free_16().
+inline void ll_aligned_free_16(void *p)
 {
 #if defined(LL_WINDOWS)
-	return _aligned_realloc(ptr, size, 16);
+	_aligned_free(p);
 #elif defined(LL_DARWIN)
-	return realloc(ptr,size); // default osx malloc is 16 byte aligned.
+	return free(p);
 #else
-	return realloc(ptr,size); // FIXME not guaranteed to be aligned.
+	free(p); // posix_memalign() is compatible with heap deallocator
 #endif
 }
 
-inline void ll_aligned_free_16(void *p)
+inline void* ll_aligned_realloc_16(void* ptr, size_t size, size_t old_size) // returned hunk MUST be freed with ll_aligned_free_16().
 {
 #if defined(LL_WINDOWS)
-	_aligned_free(p);
+	return _aligned_realloc(ptr, size, 16);
 #elif defined(LL_DARWIN)
-	return free(p);
+	return realloc(ptr,size); // default osx malloc is 16 byte aligned.
 #else
-	free(p); // posix_memalign() is compatible with heap deallocator
+	//FIXME: memcpy is SLOW
+	void* ret = ll_aligned_malloc_16(size);
+	if (ptr)
+	{
+		if (ret)
+		{
+			// Only copy the size of the smallest memory block to avoid memory corruption.
+			memcpy(ret, ptr, llmin(old_size, size));
+		}
+		ll_aligned_free_16(ptr);
+	}
+	return ret;
 #endif
 }
 
+
 #else // USE_TCMALLOC
 // ll_aligned_foo_16 are not needed with tcmalloc
 #define ll_aligned_malloc_16 malloc
-#define ll_aligned_realloc_16 realloc
+#define ll_aligned_realloc_16(a,b,c) realloc(a,b)
 #define ll_aligned_free_16 free
 #endif // USE_TCMALLOC
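
Usage note: ll_aligned_realloc_16() now takes the previous allocation size, because the generic (non-Windows, non-Darwin) path reallocates by hand with ll_aligned_malloc_16() + memcpy() + ll_aligned_free_16() and needs old_size to bound the copy. A minimal caller sketch follows; the function name and buffer sizes are hypothetical, and only the allocation helpers and the LL_CHECK_MEMORY macro come from the patch:

	// Hypothetical caller of the revised 16-byte-aligned allocation API.
	#include "llmemory.h"

	void example_grow_buffer()
	{
		size_t old_size = 64;
		size_t new_size = 128;
		void* buf = ll_aligned_malloc_16(old_size);            // must be freed with ll_aligned_free_16()
		LL_CHECK_MEMORY                                         // expands to llassert(_CrtCheckMemory()); only when LL_WINDOWS && LL_DEBUG
		buf = ll_aligned_realloc_16(buf, new_size, old_size);   // old_size bounds the memcpy on the generic path
		LL_CHECK_MEMORY
		ll_aligned_free_16(buf);
	}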