From 7200f9fb2dfb106e0afe36b54cbc9fb826de7344 Mon Sep 17 00:00:00 2001
From: Ptolemy
Date: Thu, 13 Jan 2022 12:25:21 -0800
Subject: SL-16606: Add profiler category MEMORY

---
 indra/llcommon/llcommon.cpp |  4 ++--
 indra/llcommon/llmemory.h   | 20 ++++++++++----------
 2 files changed, 12 insertions(+), 12 deletions(-)

(limited to 'indra/llcommon')

diff --git a/indra/llcommon/llcommon.cpp b/indra/llcommon/llcommon.cpp
index 25a809dad2..d2c4e66160 100644
--- a/indra/llcommon/llcommon.cpp
+++ b/indra/llcommon/llcommon.cpp
@@ -42,7 +42,7 @@ void *operator new(size_t size)
     void* ptr;
     if (gProfilerEnabled)
     {
-        LL_PROFILE_ZONE_SCOPED;
+        LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY;
         ptr = (malloc)(size);
     }
     else
@@ -62,7 +62,7 @@ void operator delete(void *ptr) noexcept
     TracyFree(ptr);
     if (gProfilerEnabled)
     {
-        LL_PROFILE_ZONE_SCOPED;
+        LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY;
         (free)(ptr);
     }
     else
diff --git a/indra/llcommon/llmemory.h b/indra/llcommon/llmemory.h
index 41023b4ba4..ac6c969d70 100644
--- a/indra/llcommon/llmemory.h
+++ b/indra/llcommon/llmemory.h
@@ -136,7 +136,7 @@ public: \
 #else
     inline void* ll_aligned_malloc_fallback( size_t size, int align )
     {
-        LL_PROFILE_ZONE_SCOPED;
+        LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY;
 #if defined(LL_WINDOWS)
         void* ret = _aligned_malloc(size, align);
 #else
@@ -157,7 +157,7 @@ public: \
 
     inline void ll_aligned_free_fallback( void* ptr )
     {
-        LL_PROFILE_ZONE_SCOPED;
+        LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY;
         LL_PROFILE_FREE(ptr);
 #if defined(LL_WINDOWS)
         _aligned_free(ptr);
@@ -174,7 +174,7 @@ public: \
 
 inline void* ll_aligned_malloc_16(size_t size) // returned hunk MUST be freed with ll_aligned_free_16().
 {
-    LL_PROFILE_ZONE_SCOPED;
+    LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY;
 #if defined(LL_WINDOWS)
     void* ret = _aligned_malloc(size, 16);
 #elif defined(LL_DARWIN)
@@ -190,7 +190,7 @@ inline void* ll_aligned_malloc_16(size_t size) // returned hunk MUST be freed wi
 
 inline void ll_aligned_free_16(void *p)
 {
-    LL_PROFILE_ZONE_SCOPED;
+    LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY;
     LL_PROFILE_FREE(p);
 #if defined(LL_WINDOWS)
     _aligned_free(p);
@@ -203,7 +203,7 @@ inline void ll_aligned_free_16(void *p)
 
 inline void* ll_aligned_realloc_16(void* ptr, size_t size, size_t old_size) // returned hunk MUST be freed with ll_aligned_free_16().
 {
-    LL_PROFILE_ZONE_SCOPED;
+    LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY;
     LL_PROFILE_FREE(ptr);
 #if defined(LL_WINDOWS)
     void* ret = _aligned_realloc(ptr, size, 16);
@@ -228,7 +228,7 @@ inline void* ll_aligned_realloc_16(void* ptr, size_t size, size_t old_size) // r
 
 inline void* ll_aligned_malloc_32(size_t size) // returned hunk MUST be freed with ll_aligned_free_32().
 {
-    LL_PROFILE_ZONE_SCOPED;
+    LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY;
 #if defined(LL_WINDOWS)
     void* ret = _aligned_malloc(size, 32);
 #elif defined(LL_DARWIN)
@@ -244,7 +244,7 @@ inline void* ll_aligned_malloc_32(size_t size) // returned hunk MUST be freed wi
 
 inline void ll_aligned_free_32(void *p)
 {
-    LL_PROFILE_ZONE_SCOPED;
+    LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY;
     LL_PROFILE_FREE(p);
 #if defined(LL_WINDOWS)
     _aligned_free(p);
@@ -259,7 +259,7 @@ inline void ll_aligned_free_32(void *p)
 template <size_t ALIGNMENT>
 LL_FORCE_INLINE void* ll_aligned_malloc(size_t size)
 {
-    LL_PROFILE_ZONE_SCOPED;
+    LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY;
     void* ret;
     if (LL_DEFAULT_HEAP_ALIGN % ALIGNMENT == 0)
     {
@@ -284,7 +284,7 @@ LL_FORCE_INLINE void* ll_aligned_malloc(size_t size)
 template <size_t ALIGNMENT>
 LL_FORCE_INLINE void ll_aligned_free(void* ptr)
 {
-    LL_PROFILE_ZONE_SCOPED;
+    LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY;
     if (ALIGNMENT == LL_DEFAULT_HEAP_ALIGN)
     {
         LL_PROFILE_FREE(ptr);
@@ -309,7 +309,7 @@ LL_FORCE_INLINE void ll_aligned_free(void* ptr)
 //
 inline void ll_memcpy_nonaliased_aligned_16(char* __restrict dst, const char* __restrict src, size_t bytes)
 {
-    LL_PROFILE_ZONE_SCOPED;
+    LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY;
     assert(src != NULL);
     assert(dst != NULL);
     assert(bytes > 0);
-- 
cgit v1.2.3
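
For readers following along outside the viewer tree: the patch is a mechanical macro swap, replacing the generic LL_PROFILE_ZONE_SCOPED zone with a memory-tagged variant so allocation and free paths show up as their own category in the Tracy capture. The snippet below is a minimal, hypothetical sketch of how such a category macro could be layered on Tracy's ZoneScoped / ZoneScopedNC; the viewer's real definitions live in llprofiler.h, and the label text, color value, helper function, and include path used here are assumptions, not the project's actual code.

    // Hypothetical sketch only -- not the viewer's llprofiler.h.
    // Assumes the Tracy client header is on the include path (older Tracy
    // releases ship it as "Tracy.hpp", newer ones as "tracy/Tracy.hpp").
    // With TRACY_ENABLE undefined, both macros compile away to no-ops.
    #include <cstddef>
    #include <cstdlib>
    #include "Tracy.hpp"

    // Generic zone: unnamed, default coloring in the Tracy timeline.
    #define LL_PROFILE_ZONE_SCOPED                  ZoneScoped

    // Category zone: ZoneScopedNC tags the zone with a fixed label and color,
    // so memory work can be filtered in Tracy's find/statistics views. The
    // label "Memory" and color 0xB26818 are illustrative choices only.
    #define LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY  ZoneScopedNC("Memory", 0xB26818)

    // Usage mirrors the hunks above: place the macro at the top of the scope
    // to be timed. example_tracked_malloc is a made-up helper for illustration.
    inline void* example_tracked_malloc(std::size_t size)
    {
        LL_PROFILE_ZONE_SCOPED_CATEGORY_MEMORY; // appears under the "Memory" label
        return std::malloc(size);
    }

A project could equally route the category through Tracy's ZoneNamedNC, whose extra boolean argument allows each category to be switched on or off at runtime; either way, the call sites stay exactly as shown in the diff.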