path: root/indra/llcommon/llmemory.h
Diffstat (limited to 'indra/llcommon/llmemory.h')
-rwxr-xr-x  indra/llcommon/llmemory.h  9
1 file changed, 5 insertions(+), 4 deletions(-)
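
The diff below touches the pointer-alignment helpers in llmemory.h: it adds an include of "stdtypes.h" and replaces reinterpret_cast<uintptr_t>(ptr) with the shorter function-style uintptr_t(ptr) cast in ll_assert_aligned and the LL_NEXT_ALIGNED_ADDRESS helpers. As a minimal standalone sketch (not part of the commit; the helper name and buffer are illustrative), the 16-byte round-up idiom those helpers rely on looks like this:

// Standalone illustration of the (ptr + 0xF) & ~0xF round-up used by
// LL_NEXT_ALIGNED_ADDRESS; next_aligned_16 is a hypothetical name.
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

template <typename T>
T* next_aligned_16(T* address)
{
    // Adding 0xF and then clearing the low four bits rounds any address up
    // to the next 16-byte boundary; an already-aligned address is unchanged.
    return reinterpret_cast<T*>((uintptr_t(address) + 0xF) & ~uintptr_t(0xF));
}

int main()
{
    char buffer[64];
    char* p = next_aligned_16(buffer + 1);
    assert(uintptr_t(p) % 16 == 0);   // mirrors what ll_assert_aligned(p, 16) checks under SHOW_ASSERT
    printf("rounded %p up to %p\n",
           static_cast<void*>(buffer + 1), static_cast<void*>(p));
    return 0;
}
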
diff --git a/indra/llcommon/llmemory.h b/indra/llcommon/llmemory.h
index c4c9cc0566..0fb257aab1 100755
--- a/indra/llcommon/llmemory.h
+++ b/indra/llcommon/llmemory.h
@@ -28,6 +28,7 @@
#include "linden_common.h"
#include "llunits.h"
+#include "stdtypes.h"
#if !LL_WINDOWS
#include <stdint.h>
#endif
@@ -59,7 +60,7 @@ class LLMutex ;
LL_COMMON_API void ll_assert_aligned_func(uintptr_t ptr,U32 alignment);
#ifdef SHOW_ASSERT
-#define ll_assert_aligned(ptr,alignment) ll_assert_aligned_func(reinterpret_cast<uintptr_t>(ptr),((U32)alignment))
+#define ll_assert_aligned(ptr,alignment) ll_assert_aligned_func(uintptr_t(ptr),((U32)alignment))
#else
#define ll_assert_aligned(ptr,alignment)
#endif
@@ -69,13 +70,13 @@ LL_COMMON_API void ll_assert_aligned_func(uintptr_t ptr,U32 alignment);
template <typename T> T* LL_NEXT_ALIGNED_ADDRESS(T* address)
{
return reinterpret_cast<T*>(
- (reinterpret_cast<uintptr_t>(address) + 0xF) & ~0xF);
+ (uintptr_t(address) + 0xF) & ~0xF);
}
template <typename T> T* LL_NEXT_ALIGNED_ADDRESS_64(T* address)
{
return reinterpret_cast<T*>(
- (reinterpret_cast<uintptr_t>(address) + 0x3F) & ~0x3F);
+ (uintptr_t(address) + 0x3F) & ~0x3F);
}
#if LL_LINUX || LL_DARWIN
@@ -97,7 +98,7 @@ template <typename T> T* LL_NEXT_ALIGNED_ADDRESS_64(T* address)
//------------------------------------------------------------------------------------------------
//------------------------------------------------------------------------------------------------
// for enable buffer overrun detection predefine LL_DEBUG_BUFFER_OVERRUN in current library
- // change preprocessro code to: #if 1 && defined(LL_WINDOWS)
+ // change preprocessor code to: #if 1 && defined(LL_WINDOWS)
#if 0 && defined(LL_WINDOWS)
void* ll_aligned_malloc_fallback( size_t size, int align );