author     Richard Linden <none@none>    2013-01-09 23:06:12 -0800
committer  Richard Linden <none@none>    2013-01-09 23:06:12 -0800
commit     a09719c0eb8d06d6bb689984a1b108932ea35314 (patch)
tree       4212c95fdc834b18c9e8aa322daed8266a2fb6fc
parent     27bec2b2f21f19a7c04707b03f83c8caa855872d (diff)
parent     1a888f786c274ee6eaed54272718eeef5e685dbf (diff)
Automated merge with ssh://hg.lindenlab.com/richard/viewer-interesting-metrics
-rw-r--r--  indra/llcommon/llmemory.h   14
-rw-r--r--  indra/llcommon/lltrace.h    85
-rw-r--r--  indra/newview/lldrawable.h  12
3 files changed, 86 insertions, 25 deletions
diff --git a/indra/llcommon/llmemory.h b/indra/llcommon/llmemory.h
index 4ead45679f..95500753e4 100644
--- a/indra/llcommon/llmemory.h
+++ b/indra/llcommon/llmemory.h
@@ -39,6 +39,20 @@ class LLMutex ;
#define LL_CHECK_MEMORY
#endif
+#if LL_WINDOWS
+#define LL_ALIGN_OF __alignof
+#else
+#define LL_ALIGN_OF __alignof__
+#endif
+
+#if LL_WINDOWS
+#define LL_DEFAULT_HEAP_ALIGN 8
+#elif LL_DARWIN
+#define LL_DEFAULT_HEAP_ALIGN 16
+#elif LL_LINUX
+#define LL_DEFAULT_HEAP_ALIGN 8
+#endif
+
inline void* ll_aligned_malloc( size_t size, int align )
{
void* mem = malloc( size + (align - 1) + sizeof(void*) );
diff --git a/indra/llcommon/lltrace.h b/indra/llcommon/lltrace.h
index 1a156e583e..8ec0cdc4dc 100644
--- a/indra/llcommon/lltrace.h
+++ b/indra/llcommon/lltrace.h
@@ -660,7 +660,57 @@ struct MemFootprint<std::list<T> >
}
};
-template<typename DERIVED>
+template <size_t ALIGNMENT, size_t RESERVE>
+void* allocAligned(size_t size)
+{
+ llstatic_assert((ALIGNMENT > 0) && (ALIGNMENT & (ALIGNMENT - 1)) == 0, "Alignment must be a power of 2");
+
+ void* padded_allocation;
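+ // round RESERVE up to a whole multiple of ALIGNMENT so the pointer returned below keeps the requested alignment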
+ const size_t aligned_reserve = ((RESERVE / ALIGNMENT) * ALIGNMENT)
+ + ((RESERVE % ALIGNMENT) ? ALIGNMENT : 0);
+ const size_t size_with_reserve = size + aligned_reserve;
+ if (ALIGNMENT <= LL_DEFAULT_HEAP_ALIGN)
+ {
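+ // the default heap alignment already satisfies the request, so a plain malloc is enough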
+ padded_allocation = malloc(size_with_reserve);
+ }
+ else
+ {
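+ // stricter alignment than the heap default: fall back to the platform's aligned allocator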
+#if LL_WINDOWS
+ padded_allocation = _aligned_malloc(size_with_reserve, ALIGNMENT);
+#elif LL_DARWIN
+ padded_allocation = ll_aligned_malloc(size_with_reserve, ALIGNMENT);
+#else
+ posix_memalign(&padded_allocation, ALIGNMENT, size_with_reserve);
+#endif
+ }
+ return (char*)padded_allocation + aligned_reserve;
+}
+
+template<size_t ALIGNMENT, size_t RESERVE>
+void deallocAligned(void* ptr)
+{
+ const size_t aligned_reserve = ((RESERVE / ALIGNMENT) * ALIGNMENT)
+ + ((RESERVE % ALIGNMENT) ? ALIGNMENT : 0);
+
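+ // step back over the reserve to recover the pointer originally returned by the allocator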
+ void* original_allocation = (char*)ptr - aligned_reserve;
+
+ if (ALIGNMENT <= LL_DEFAULT_HEAP_ALIGN)
+ {
+ free(original_allocation);
+ }
+ else
+ {
+#if LL_WINDOWS
+ _aligned_free(original_allocation);
+#elif LL_DARWIN
+ ll_aligned_free(original_allocation);
+#else
+ free(original_allocation);
+#endif
+ }
+}
+
+template<typename DERIVED, size_t ALIGNMENT = LL_DEFAULT_HEAP_ALIGN>
class MemTrackable
{
template<typename TRACKED, typename TRACKED_IS_TRACKER>
@@ -676,44 +726,49 @@ public:
memDisclaim(mMemFootprint);
}
- void* operator new(size_t allocation_size)
+ void* operator new(size_t size)
{
- // reserve 8 bytes for allocation size (and preserving 8 byte alignment of structs)
- void* allocation = ::operator new(allocation_size + 8);
- *(size_t*)allocation = allocation_size;
MemStatAccumulator* accumulator = DERIVED::sMemStat.getPrimaryAccumulator();
if (accumulator)
{
- accumulator->mSize += allocation_size;
+ accumulator->mSize += size;
accumulator->mAllocatedCount++;
}
- return (void*)((char*)allocation + 8);
+
+ // reserve sizeof(size_t) bytes for the allocation size (while preserving the requested alignment)
+ void* allocation = allocAligned<ALIGNMENT, sizeof(size_t)>(size);
+ ((size_t*)allocation)[-1] = size;
+
+ return allocation;
}
void operator delete(void* ptr)
{
- size_t* allocation_size = (size_t*)((char*)ptr - 8);
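+ // read back the allocation size stashed just below the pointer by operator new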
+ size_t allocation_size = ((size_t*)ptr)[-1];
MemStatAccumulator* accumulator = DERIVED::sMemStat.getPrimaryAccumulator();
if (accumulator)
{
- accumulator->mSize -= *allocation_size;
+ accumulator->mSize -= allocation_size;
accumulator->mAllocatedCount--;
accumulator->mDeallocatedCount++;
}
- ::delete((char*)ptr - 8);
+ deallocAligned<ALIGNMENT, sizeof(size_t)>(ptr);
}
void *operator new [](size_t size)
{
- size_t* result = (size_t*)malloc(size + 8);
- *result = size;
MemStatAccumulator* accumulator = DERIVED::sMemStat.getPrimaryAccumulator();
if (accumulator)
{
accumulator->mSize += size;
accumulator->mAllocatedCount++;
}
- return (void*)((char*)result + 8);
+
+ // reserve sizeof(size_t) bytes for the allocation size (while preserving the requested alignment)
+ void* allocation = allocAligned<ALIGNMENT, sizeof(size_t)>(size);
+ ((size_t*)allocation)[-1] = size;
+
+ return allocation;
}
void operator delete[](void* ptr)
@@ -726,7 +781,7 @@ public:
accumulator->mAllocatedCount--;
accumulator->mDeallocatedCount++;
}
- ::delete[]((char*)ptr - 8);
+ deallocAligned<ALIGNMENT, sizeof(size_t)>(ptr);
}
// claim memory associated with other objects/data as our own, adding to our calculated footprint
@@ -783,6 +838,8 @@ public:
private:
size_t mMemFootprint;
+
+
template<typename TRACKED, typename TRACKED_IS_TRACKER = void>
struct TrackMemImpl
{
diff --git a/indra/newview/lldrawable.h b/indra/newview/lldrawable.h
index abfdea2699..c22cce246b 100644
--- a/indra/newview/lldrawable.h
+++ b/indra/newview/lldrawable.h
@@ -61,7 +61,7 @@ const U32 SILHOUETTE_HIGHLIGHT = 0;
LL_ALIGN_PREFIX(16)
class LLDrawable
: public LLRefCount,
- public LLTrace::MemTrackable<LLDrawable>
+ public LLTrace::MemTrackable<LLDrawable, 16>
{
public:
LLDrawable(const LLDrawable& rhs)
@@ -77,16 +77,6 @@ public:
static void initClass();
- void* operator new(size_t size)
- {
- return ll_aligned_malloc_16(size);
- }
-
- void operator delete(void* ptr)
- {
- ll_aligned_free_16(ptr);
- }
-
LLDrawable() { init(); }
void markDead(); // Mark this drawable as dead
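
Usage sketch (not part of the commit): with this change, a class that needs 16-byte-aligned, size-tracked allocations opts in through MemTrackable's alignment parameter instead of hand-writing operator new/delete, following the LLDrawable pattern above. The ExampleOctreeNode name below is hypothetical, and whatever per-class statistic MemTrackable expects (the sMemStat it references) is elided.

LL_ALIGN_PREFIX(16)
class ExampleOctreeNode
:	public LLTrace::MemTrackable<ExampleOctreeNode, 16>	// 16 = heap alignment handed to allocAligned/deallocAligned
{
public:
	ExampleOctreeNode() {}
	// no per-class operator new/delete needed; MemTrackable's aligned,
	// size-tracking versions are inherited (cf. the overrides removed from LLDrawable)
} LL_ALIGN_POSTFIX(16);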