path: root/indra/llcommon/llmemory.h
author    ruslantproductengine <ruslantproductengine@lindenlab.com>    2014-10-27 17:10:08 +0200
committer ruslantproductengine <ruslantproductengine@lindenlab.com>    2014-10-27 17:10:08 +0200
commit    366bcd0cbca43081fe58825fd463018e49b51740 (patch)
tree      87ecb99f0a7d27ca4bd9effb94e0c9218379e84b /indra/llcommon/llmemory.h
parent    e3f62a54e6c3fe5423a1e766c4ee30388aa471bd (diff)
MAINT-4435 FIXED: fix in llvolume.cpp. Perform a full build if the number of vertices
is less than allowed. The changes in all other files add auxiliary methods for catching similar bugs in the future.
Diffstat (limited to 'indra/llcommon/llmemory.h')
-rwxr-xr-x  indra/llcommon/llmemory.h  54
1 file changed, 33 insertions(+), 21 deletions(-)
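For context, a minimal usage sketch of the fallback pair touched by this diff (not part of the commit; the 1 KiB size, the 64-byte alignment, and the assert are illustrative assumptions). On the non-Windows path the allocator stashes the raw malloc() pointer just before the aligned block, so the matching free must always be used:

    #include <assert.h>
    #include <stdint.h>
    #include "llmemory.h"

    void example()
    {
        // request 1 KiB aligned to 64 bytes via the portable fallback path
        void* p = ll_aligned_malloc_fallback(1024, 64);
        assert(((uintptr_t)p & 63) == 0);   // returned pointer honors the requested alignment
        // ... use the buffer ...
        ll_aligned_free_fallback(p);        // plain free() would receive the shifted pointer and corrupt the heap
    }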
diff --git a/indra/llcommon/llmemory.h b/indra/llcommon/llmemory.h
index 7d1d541a4b..c4c9cc0566 100755
--- a/indra/llcommon/llmemory.h
+++ b/indra/llcommon/llmemory.h
@@ -94,32 +94,44 @@ template <typename T> T* LL_NEXT_ALIGNED_ADDRESS_64(T* address)
#define LL_ALIGN_16(var) LL_ALIGN_PREFIX(16) var LL_ALIGN_POSTFIX(16)
-
-inline void* ll_aligned_malloc_fallback( size_t size, int align )
-{
-#if defined(LL_WINDOWS)
- return _aligned_malloc(size, align);
+//------------------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------------------
+ // To enable buffer overrun detection, predefine LL_DEBUG_BUFFER_OVERRUN in the current library
+ // and change the preprocessor condition below to: #if 1 && defined(LL_WINDOWS)
+
+#if 0 && defined(LL_WINDOWS)
+ void* ll_aligned_malloc_fallback( size_t size, int align );
+ void ll_aligned_free_fallback( void* ptr );
+//------------------------------------------------------------------------------------------------
#else
- void* mem = malloc( size + (align - 1) + sizeof(void*) );
- char* aligned = ((char*)mem) + sizeof(void*);
- aligned += align - ((uintptr_t)aligned & (align - 1));
-
- ((void**)aligned)[-1] = mem;
- return aligned;
-#endif
-}
+ inline void* ll_aligned_malloc_fallback( size_t size, int align )
+ {
+ #if defined(LL_WINDOWS)
+ return _aligned_malloc(size, align);
+ #else
+ void* mem = malloc( size + (align - 1) + sizeof(void*) );
+ char* aligned = ((char*)mem) + sizeof(void*);
+ aligned += align - ((uintptr_t)aligned & (align - 1));
+
+ ((void**)aligned)[-1] = mem;
+ return aligned;
+ #endif
+ }
-inline void ll_aligned_free_fallback( void* ptr )
-{
-#if defined(LL_WINDOWS)
- _aligned_free(ptr);
-#else
- if (ptr)
+ inline void ll_aligned_free_fallback( void* ptr )
{
- free( ((void**)ptr)[-1] );
+ #if defined(LL_WINDOWS)
+ _aligned_free(ptr);
+ #else
+ if (ptr)
+ {
+ free( ((void**)ptr)[-1] );
+ }
+ #endif
}
#endif
-}
+//------------------------------------------------------------------------------------------------
+//------------------------------------------------------------------------------------------------
#if !LL_USE_TCMALLOC
inline void* ll_aligned_malloc_16(size_t size) // returned hunk MUST be freed with ll_aligned_free_16().
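The comment added in this hunk describes an opt-in, Windows-only buffer overrun detection mode: predefine LL_DEBUG_BUFFER_OVERRUN and flip the guard to #if 1 && defined(LL_WINDOWS), so the fallback functions become out-of-line declarations and an instrumented implementation (not shown in this header) takes over. Purely as a hypothetical illustration of one common way such detection can work, the sketch below uses a guard page placed right after the user block so that a write past the end faults immediately. The names debug_aligned_malloc/debug_aligned_free and every detail here are assumptions, not the commit's code.

    #include <windows.h>
    #include <stddef.h>
    #include <stdint.h>

    // Hypothetical guard-page allocator, NOT the implementation added by this commit.
    // Assumes 4 KiB pages and a power-of-two 'align' no larger than a page.
    void* debug_aligned_malloc(size_t size, int align)
    {
        const size_t page = 4096;
        size_t total = ((size + page - 1) / page + 2) * page;   // data pages + one guard page
        char* base = (char*)VirtualAlloc(NULL, total, MEM_RESERVE | MEM_COMMIT, PAGE_READWRITE);
        if (!base) return NULL;

        DWORD old_protect;
        VirtualProtect(base + total - page, page, PAGE_NOACCESS, &old_protect);  // last page traps overruns

        // Place the block so its end sits as close to the guard page as the alignment allows.
        char* user = base + total - page - size;
        user = (char*)((uintptr_t)user & ~((uintptr_t)align - 1));
        return user;
    }

    void debug_aligned_free(void* ptr)
    {
        if (!ptr) return;
        MEMORY_BASIC_INFORMATION info;
        VirtualQuery(ptr, &info, sizeof(info));           // recover the base of the reserved region
        VirtualFree(info.AllocationBase, 0, MEM_RELEASE); // release the whole reservation, guard page included
    }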