Diffstat (limited to 'indra/llcommon')
-rw-r--r--  indra/llcommon/linden_common.h  | 10
-rw-r--r--  indra/llcommon/llcommon.cpp     | 42
-rw-r--r--  indra/llcommon/llerror.cpp      | 12
-rw-r--r--  indra/llcommon/llerror.h        |  5
-rw-r--r--  indra/llcommon/llerrorcontrol.h |  1
-rw-r--r--  indra/llcommon/lleventfilter.h  |  2
-rw-r--r--  indra/llcommon/llmemory.cpp     |  2
-rw-r--r--  indra/llcommon/llmutex.cpp      | 12
-rw-r--r--  indra/llcommon/llprofiler.h     | 47
-rw-r--r--  indra/llcommon/llsdutil.cpp     | 22
-rw-r--r--  indra/llcommon/llsys.cpp        |  1
-rw-r--r--  indra/llcommon/llthread.cpp     |  7
-rw-r--r--  indra/llcommon/lluuid.h         | 11
13 files changed, 163 insertions(+), 11 deletions(-)
diff --git a/indra/llcommon/linden_common.h b/indra/llcommon/linden_common.h
index 45ac43910c..a228fd22be 100644
--- a/indra/llcommon/linden_common.h
+++ b/indra/llcommon/linden_common.h
@@ -27,6 +27,14 @@
#ifndef LL_LINDEN_COMMON_H
#define LL_LINDEN_COMMON_H
+#include "llprofiler.h"
+#if TRACY_ENABLE && !defined(LL_PROFILER_ENABLE_TRACY_OPENGL) // hooks for memory profiling
+void *tracy_aligned_malloc(size_t size, size_t alignment);
+void tracy_aligned_free(void *memblock);
+#define _aligned_malloc(X, Y) tracy_aligned_malloc((X), (Y))
+#define _aligned_free(X) tracy_aligned_free((X))
+#endif
+
// *NOTE: Please keep includes here to a minimum!
//
// Files included here are included in every library .cpp file and
@@ -60,6 +68,4 @@
#include "llerror.h"
#include "llfile.h"
-#include "llprofiler.h" // must be before fast timer; needed due to LLThreads potentially needing access to tracy
-
#endif
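
With the hooks above in place, any translation unit that includes linden_common.h in a Tracy-enabled build transparently routes its aligned CRT allocations through the tracking wrappers defined in llcommon.cpp below. A minimal sketch of the effect, assuming a Windows build with TRACY_ENABLE set (the calling function here is hypothetical, not part of this change):

// Hypothetical caller: nothing at the call site mentions Tracy, but the
// #define redirection in linden_common.h rewrites both calls below.
#include "linden_common.h"

void profile_aligned_roundtrip()
{
    // Expands to tracy_aligned_malloc(1024, 16): the real _aligned_malloc runs,
    // then the pointer/size pair is reported to Tracy via TracyAlloc.
    void* buf = _aligned_malloc(1024, 16);

    // Expands to tracy_aligned_free(buf): TracyFree is reported first, then the
    // block is released with the real _aligned_free.
    _aligned_free(buf);
}
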
diff --git a/indra/llcommon/llcommon.cpp b/indra/llcommon/llcommon.cpp
index 96be913d17..da61e7539a 100644
--- a/indra/llcommon/llcommon.cpp
+++ b/indra/llcommon/llcommon.cpp
@@ -33,6 +33,48 @@
#include "lltracethreadrecorder.h"
#include "llcleanup.h"
+#if (TRACY_ENABLE)
+// Override new/delete for Tracy memory profiling
+void *operator new(size_t size)
+{
+ auto ptr = (malloc) (size);
+ if (!ptr)
+ {
+ throw std::bad_alloc();
+ return nullptr;
+ }
+ TracyAlloc(ptr, size);
+ return ptr;
+}
+
+void operator delete(void *ptr) noexcept
+{
+ TracyFree(ptr);
+ (free)(ptr);
+}
+
+// C-style malloc/free can't be so easily overridden, so we define tracy versions and use
+// a pre-processor #define in linden_common.h to redirect to them. The parens around the native
+// functions below prevent recursive substitution by the preprocessor.
+//
+// Unaligned mallocs are rare in LL code but hooking them causes problems in 3p lib code (looking at
+// you, Havok), so we'll only capture the aligned version.
+
+void *tracy_aligned_malloc(size_t size, size_t alignment)
+{
+ auto ptr = (_aligned_malloc) (size, alignment);
+ if (ptr) TracyAlloc(ptr, size);
+ return ptr;
+}
+
+void tracy_aligned_free(void *memblock)
+{
+ TracyFree(memblock);
+ (_aligned_free)(memblock);
+}
+
+#endif
+
//static
BOOL LLCommon::sAprInitialized = FALSE;
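
The global operator new/delete overrides above mean ordinary C++ heap traffic is captured without touching call sites. A small hypothetical illustration of what Tracy ends up seeing (the Widget type and function are invented for the example):

// Hypothetical: with the overrides compiled into a TRACY_ENABLE build, this
// pair of statements appears in Tracy's memory view as a matched alloc/free,
// with no explicit instrumentation at the call site.
struct Widget { int data[64]; };

void allocate_and_release()
{
    Widget* w = new Widget();   // overridden operator new -> malloc + TracyAlloc
    delete w;                   // overridden operator delete -> TracyFree + free
}
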
diff --git a/indra/llcommon/llerror.cpp b/indra/llcommon/llerror.cpp
index 8355df9045..f7af181927 100644
--- a/indra/llcommon/llerror.cpp
+++ b/indra/llcommon/llerror.cpp
@@ -109,6 +109,7 @@ namespace {
virtual void recordMessage(LLError::ELevel level,
const std::string& message) override
{
+ LL_PROFILE_ZONE_SCOPED
int syslogPriority = LOG_CRIT;
switch (level) {
case LLError::LEVEL_DEBUG: syslogPriority = LOG_DEBUG; break;
@@ -166,6 +167,7 @@ namespace {
virtual void recordMessage(LLError::ELevel level,
const std::string& message) override
{
+ LL_PROFILE_ZONE_SCOPED
if (LLError::getAlwaysFlush())
{
mFile << message << std::endl;
@@ -208,6 +210,7 @@ namespace {
virtual void recordMessage(LLError::ELevel level,
const std::string& message) override
{
+ LL_PROFILE_ZONE_SCOPED
static std::string s_ansi_error = createANSI("31"); // red
static std::string s_ansi_warn = createANSI("34"); // blue
static std::string s_ansi_debug = createANSI("35"); // magenta
@@ -220,7 +223,8 @@ namespace {
}
else
{
- fprintf(stderr, "%s\n", message.c_str());
+ LL_PROFILE_ZONE_NAMED("fprintf");
+ fprintf(stderr, "%s\n", message.c_str());
}
}
@@ -229,6 +233,7 @@ namespace {
LL_FORCE_INLINE void writeANSI(const std::string& ansi_code, const std::string& message)
{
+ LL_PROFILE_ZONE_SCOPED
static std::string s_ansi_bold = createANSI("1"); // bold
static std::string s_ansi_reset = createANSI("0"); // reset
// ANSI color code escape sequence, message, and reset in one fprintf call
@@ -265,6 +270,7 @@ namespace {
virtual void recordMessage(LLError::ELevel level,
const std::string& message) override
{
+ LL_PROFILE_ZONE_SCOPED
mBuffer->addLine(message);
}
@@ -291,6 +297,7 @@ namespace {
virtual void recordMessage(LLError::ELevel level,
const std::string& message) override
{
+ LL_PROFILE_ZONE_SCOPED
debugger_print(message);
}
};
@@ -1178,6 +1185,7 @@ namespace
void writeToRecorders(const LLError::CallSite& site, const std::string& message)
{
+ LL_PROFILE_ZONE_SCOPED
LLError::ELevel level = site.mLevel;
LLError::SettingsConfigPtr s = LLError::Settings::getInstance()->getSettingsConfig();
@@ -1311,6 +1319,7 @@ namespace LLError
bool Log::shouldLog(CallSite& site)
{
+ LL_PROFILE_ZONE_SCOPED
LLMutexTrylock lock(getMutex<LOG_MUTEX>(), 5);
if (!lock.isLocked())
{
@@ -1354,6 +1363,7 @@ namespace LLError
void Log::flush(const std::ostringstream& out, const CallSite& site)
{
+ LL_PROFILE_ZONE_SCOPED
LLMutexTrylock lock(getMutex<LOG_MUTEX>(),5);
if (!lock.isLocked())
{
diff --git a/indra/llcommon/llerror.h b/indra/llcommon/llerror.h
index d439136ca8..d06c0e2132 100644
--- a/indra/llcommon/llerror.h
+++ b/indra/llcommon/llerror.h
@@ -35,7 +35,9 @@
#include "stdtypes.h"
+#include "llprofiler.h"
#include "llpreprocessor.h"
+
#include <boost/static_assert.hpp>
const int LL_ERR_NOERR = 0;
@@ -348,7 +350,8 @@ typedef LLError::NoClassInfo _LL_CLASS_TO_LOG;
// if (condition) LL_INFOS() << "True" << LL_ENDL; else LL_INFOS()() << "False" << LL_ENDL;
#define lllog(level, once, ...) \
- do { \
+ do { \
+ LL_PROFILE_ZONE_NAMED("lllog"); \
const char* tags[] = {"", ##__VA_ARGS__}; \
static LLError::CallSite _site(lllog_site_args_(level, once, tags)); \
lllog_test_()
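
Because the zone is opened inside the lllog macro itself, every logging call site now shows up as an "lllog" span covering message construction and flushing. A hedged illustration (the function and tag below are hypothetical):

// Hypothetical call site: expanding LL_INFOS via the lllog macro above opens
// an "lllog" profiler zone for the duration of the do/while block that
// formats and flushes the message.
void report_status(int frame)
{
    LL_INFOS("Example") << "frame " << frame << LL_ENDL;
}
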
diff --git a/indra/llcommon/llerrorcontrol.h b/indra/llcommon/llerrorcontrol.h
index e87bb7bf35..57f10b7895 100644
--- a/indra/llcommon/llerrorcontrol.h
+++ b/indra/llcommon/llerrorcontrol.h
@@ -190,6 +190,7 @@ namespace LLError
{}
void recordMessage(LLError::ELevel level, const std::string& message) override
{
+ LL_PROFILE_ZONE_SCOPED
mCallable(level, message);
}
private:
diff --git a/indra/llcommon/lleventfilter.h b/indra/llcommon/lleventfilter.h
index 48c2570732..7613850fb2 100644
--- a/indra/llcommon/lleventfilter.h
+++ b/indra/llcommon/lleventfilter.h
@@ -429,6 +429,8 @@ public:
// path, then stores it to mTarget.
virtual bool post(const LLSD& event)
{
+ LL_PROFILE_ZONE_SCOPED
+
// Extract the element specified by 'mPath' from 'event'. To perform a
// generic type-appropriate store through mTarget, construct an
// LLSDParam<T> and store that, thus engaging LLSDParam's custom
diff --git a/indra/llcommon/llmemory.cpp b/indra/llcommon/llmemory.cpp
index ea84e4c1ea..849867586a 100644
--- a/indra/llcommon/llmemory.cpp
+++ b/indra/llcommon/llmemory.cpp
@@ -82,6 +82,7 @@ void LLMemory::initMaxHeapSizeGB(F32Gigabytes max_heap_size)
//static
void LLMemory::updateMemoryInfo()
{
+ LL_PROFILE_ZONE_SCOPED
#if LL_WINDOWS
PROCESS_MEMORY_COUNTERS counters;
@@ -145,6 +146,7 @@ void* LLMemory::tryToAlloc(void* address, U32 size)
//static
void LLMemory::logMemoryInfo(BOOL update)
{
+ LL_PROFILE_ZONE_SCOPED
if(update)
{
updateMemoryInfo() ;
diff --git a/indra/llcommon/llmutex.cpp b/indra/llcommon/llmutex.cpp
index 4d73c04d07..a49002b5dc 100644
--- a/indra/llcommon/llmutex.cpp
+++ b/indra/llcommon/llmutex.cpp
@@ -44,6 +44,7 @@ LLMutex::~LLMutex()
void LLMutex::lock()
{
+ LL_PROFILE_ZONE_SCOPED
if(isSelfLocked())
{ //redundant lock
mCount++;
@@ -65,6 +66,7 @@ void LLMutex::lock()
void LLMutex::unlock()
{
+ LL_PROFILE_ZONE_SCOPED
if (mCount > 0)
{ //not the root unlock
mCount--;
@@ -85,6 +87,7 @@ void LLMutex::unlock()
bool LLMutex::isLocked()
{
+ LL_PROFILE_ZONE_SCOPED
if (!mMutex.try_lock())
{
return true;
@@ -108,6 +111,7 @@ LLThread::id_t LLMutex::lockingThread() const
bool LLMutex::trylock()
{
+ LL_PROFILE_ZONE_SCOPED
if(isSelfLocked())
{ //redundant lock
mCount++;
@@ -146,17 +150,20 @@ LLCondition::~LLCondition()
void LLCondition::wait()
{
+ LL_PROFILE_ZONE_SCOPED
std::unique_lock< std::mutex > lock(mMutex);
mCond.wait(lock);
}
void LLCondition::signal()
{
+ LL_PROFILE_ZONE_SCOPED
mCond.notify_one();
}
void LLCondition::broadcast()
{
+ LL_PROFILE_ZONE_SCOPED
mCond.notify_all();
}
@@ -166,6 +173,7 @@ LLMutexTrylock::LLMutexTrylock(LLMutex* mutex)
: mMutex(mutex),
mLocked(false)
{
+ LL_PROFILE_ZONE_SCOPED
if (mMutex)
mLocked = mMutex->trylock();
}
@@ -174,6 +182,7 @@ LLMutexTrylock::LLMutexTrylock(LLMutex* mutex, U32 aTries, U32 delay_ms)
: mMutex(mutex),
mLocked(false)
{
+ LL_PROFILE_ZONE_SCOPED
if (!mMutex)
return;
@@ -188,6 +197,7 @@ LLMutexTrylock::LLMutexTrylock(LLMutex* mutex, U32 aTries, U32 delay_ms)
LLMutexTrylock::~LLMutexTrylock()
{
+ LL_PROFILE_ZONE_SCOPED
if (mMutex && mLocked)
mMutex->unlock();
}
@@ -199,6 +209,7 @@ LLMutexTrylock::~LLMutexTrylock()
//
LLScopedLock::LLScopedLock(std::mutex* mutex) : mMutex(mutex)
{
+ LL_PROFILE_ZONE_SCOPED
if(mutex)
{
mutex->lock();
@@ -217,6 +228,7 @@ LLScopedLock::~LLScopedLock()
void LLScopedLock::unlock()
{
+ LL_PROFILE_ZONE_SCOPED
if(mLocked)
{
mMutex->unlock();
diff --git a/indra/llcommon/llprofiler.h b/indra/llcommon/llprofiler.h
index 062c9360dd..49510df913 100644
--- a/indra/llcommon/llprofiler.h
+++ b/indra/llcommon/llprofiler.h
@@ -45,22 +45,55 @@
// #define TRACY_ONLY_LOCALHOST 1
#define TRACY_ONLY_IPV4 1
#include "Tracy.hpp"
+
+ // Mutually exclusive with detailed memory tracing
+ #define LL_PROFILER_ENABLE_TRACY_OPENGL 0
#endif
#if LL_PROFILER_CONFIGURATION == LL_PROFILER_CONFIG_TRACY
- #define LL_PROFILER_FRAME_END FrameMark
- #define LL_PROFILER_SET_THREAD_NAME( name ) tracy::SetThreadName( name )
- #define LL_RECORD_BLOCK_TIME(name) ZoneNamedN( ___tracy_scoped_zone, #name, true );
+ #define LL_PROFILER_FRAME_END FrameMark
+ #define LL_PROFILER_SET_THREAD_NAME( name ) tracy::SetThreadName( name )
+ #define LL_RECORD_BLOCK_TIME(name) ZoneScoped // Want descriptive names; was: ZoneNamedN( ___tracy_scoped_zone, #name, true );
+ #define LL_PROFILE_ZONE_NAMED(name) ZoneNamedN( ___tracy_scoped_zone, name, true );
+ #define LL_PROFILE_ZONE_NAMED_COLOR(name,color) ZoneNamedNC( ___tracy_scopped_zone, name, color, true ) // RGB
+ #define LL_PROFILE_ZONE_SCOPED ZoneScoped
+
+ #define LL_PROFILE_ZONE_NUM( val ) ZoneValue( val )
+ #define LL_PROFILE_ZONE_TEXT( text, size ) ZoneText( text, size )
+
+ #define LL_PROFILE_ZONE_ERR(name) LL_PROFILE_ZONE_NAMED_COLOR( name, 0XFF0000 ) // RGB red
+ #define LL_PROFILE_ZONE_INFO(name) LL_PROFILE_ZONE_NAMED_COLOR( name, 0X00FFFF ) // RGB cyan
+ #define LL_PROFILE_ZONE_WARN(name) LL_PROFILE_ZONE_NAMED_COLOR( name, 0x0FFFF00 ) // RGB yellow
#endif
#if LL_PROFILER_CONFIGURATION == LL_PROFILER_CONFIG_FAST_TIMER
#define LL_PROFILER_FRAME_END
- #define LL_PROFILER_SET_THREAD_NAME( name ) (void)(name)
+ #define LL_PROFILER_SET_THREAD_NAME( name ) (void)(name)
#define LL_RECORD_BLOCK_TIME(name) const LLTrace::BlockTimer& LL_GLUE_TOKENS(block_time_recorder, __LINE__)(LLTrace::timeThisBlock(name)); (void)LL_GLUE_TOKENS(block_time_recorder, __LINE__);
+ #define LL_PROFILE_ZONE_NAMED(name) // LL_PROFILE_ZONE_NAMED is a no-op when Tracy is disabled
+ #define LL_PROFILE_ZONE_SCOPED // LL_PROFILE_ZONE_SCOPED is a no-op when Tracy is disabled
+ #define LL_PROFILE_ZONE_NAMED_COLOR(name,color) // LL_PROFILE_ZONE_NAMED_COLOR is a no-op when Tracy is disabled
+
+ #define LL_PROFILE_ZONE_NUM( val ) (void)( val ); // Not supported
+ #define LL_PROFILE_ZONE_TEXT( text, size ) (void)( text ); (void)( size ); // Not supported
+
+ #define LL_PROFILE_ZONE_ERR(name) (void)(name); // Not supported
+ #define LL_PROFILE_ZONE_INFO(name) (void)(name); // Not supported
+ #define LL_PROFILE_ZONE_WARN(name) (void)(name); // Not supported
#endif
#if LL_PROFILER_CONFIGURATION == LL_PROFILER_CONFIG_TRACY_FAST_TIMER
- #define LL_PROFILER_FRAME_END FrameMark
- #define LL_PROFILER_SET_THREAD_NAME( name ) tracy::SetThreadName( name )
- #define LL_RECORD_BLOCK_TIME(name) ZoneNamedN( ___tracy_scoped_zone, #timer_stat, true ) const LLTrace::BlockTimer& LL_GLUE_TOKENS(block_time_recorder, __LINE__)(LLTrace::timeThisBlock(name)); (void)LL_GLUE_TOKENS(block_time_recorder, __LINE__);
+ #define LL_PROFILER_FRAME_END FrameMark
+ #define LL_PROFILER_SET_THREAD_NAME( name ) tracy::SetThreadName( name )
+ #define LL_RECORD_BLOCK_TIME(name) ZoneScoped const LLTrace::BlockTimer& LL_GLUE_TOKENS(block_time_recorder, __LINE__)(LLTrace::timeThisBlock(name)); (void)LL_GLUE_TOKENS(block_time_recorder, __LINE__);
+ #define LL_PROFILE_ZONE_NAMED(name) ZoneNamedN( ___tracy_scoped_zone, name, true );
+ #define LL_PROFILE_ZONE_NAMED_COLOR(name,color) ZoneNamedNC( ___tracy_scopped_zone, name, color, true ) // RGB
+ #define LL_PROFILE_ZONE_SCOPED ZoneScoped
+
+ #define LL_PROFILE_ZONE_NUM( val ) ZoneValue( val )
+ #define LL_PROFILE_ZONE_TEXT( text, size ) ZoneText( text, size )
+
+ #define LL_PROFILE_ZONE_ERR(name) LL_PROFILE_ZONE_NAMED_COLOR( name, 0XFF0000 ) // RGB red
+ #define LL_PROFILE_ZONE_INFO(name) LL_PROFILE_ZONE_NAMED_COLOR( name, 0X00FFFF ) // RGB cyan
+ #define LL_PROFILE_ZONE_WARN(name) LL_PROFILE_ZONE_NAMED_COLOR( name, 0x0FFFF00 ) // RGB yellow
#endif
#else
#define LL_PROFILER_FRAME_END
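
Taken together, these macros keep call sites profiler-agnostic: the same source builds against Tracy, the legacy fast timers, or both. A usage sketch under that assumption (the function below is hypothetical; under LL_PROFILER_CONFIG_FAST_TIMER the zone macros compile away to no-ops or (void) casts):

// Hypothetical frame-processing function using the macros defined above.
void process_frame(int item_count)
{
    LL_PROFILE_ZONE_SCOPED                     // unnamed zone spanning the whole function
    {
        LL_PROFILE_ZONE_NAMED("update pass");  // named sub-zone for this block
        LL_PROFILE_ZONE_NUM(item_count);       // attach a numeric annotation to the zone
        // ... per-frame work ...
    }
    LL_PROFILER_FRAME_END;                     // mark the frame boundary on Tracy's timeline
}
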
diff --git a/indra/llcommon/llsdutil.cpp b/indra/llcommon/llsdutil.cpp
index eb3a96b133..c2fe15e9b7 100644
--- a/indra/llcommon/llsdutil.cpp
+++ b/indra/llcommon/llsdutil.cpp
@@ -214,6 +214,8 @@ BOOL compare_llsd_with_template(
const LLSD& template_llsd,
LLSD& resultant_llsd)
{
+ LL_PROFILE_ZONE_SCOPED
+
if (
llsd_to_test.isUndefined() &&
template_llsd.isDefined() )
@@ -335,6 +337,8 @@ bool filter_llsd_with_template(
const LLSD & template_llsd,
LLSD & resultant_llsd)
{
+ LL_PROFILE_ZONE_SCOPED
+
if (llsd_to_test.isUndefined() && template_llsd.isDefined())
{
resultant_llsd = template_llsd;
@@ -529,6 +533,8 @@ class TypeLookup
public:
TypeLookup()
{
+ LL_PROFILE_ZONE_SCOPED
+
for (const Data *di(boost::begin(typedata)), *dend(boost::end(typedata)); di != dend; ++di)
{
mMap[di->type] = di->name;
@@ -537,6 +543,8 @@ public:
std::string lookup(LLSD::Type type) const
{
+ LL_PROFILE_ZONE_SCOPED
+
MapType::const_iterator found = mMap.find(type);
if (found != mMap.end())
{
@@ -587,6 +595,8 @@ static std::string match_types(LLSD::Type expect, // prototype.type()
LLSD::Type actual, // type we're checking
const std::string& pfx) // as for llsd_matches
{
+ LL_PROFILE_ZONE_SCOPED
+
// Trivial case: if the actual type is exactly what we expect, we're good.
if (actual == expect)
return "";
@@ -624,6 +634,8 @@ static std::string match_types(LLSD::Type expect, // prototype.type()
// see docstring in .h file
std::string llsd_matches(const LLSD& prototype, const LLSD& data, const std::string& pfx)
{
+ LL_PROFILE_ZONE_SCOPED
+
// An undefined prototype means that any data is valid.
// An undefined slot in an array or map prototype means that any data
// may fill that slot.
@@ -756,6 +768,8 @@ std::string llsd_matches(const LLSD& prototype, const LLSD& data, const std::str
bool llsd_equals(const LLSD& lhs, const LLSD& rhs, int bits)
{
+ LL_PROFILE_ZONE_SCOPED
+
// We're comparing strict equality of LLSD representation rather than
// performing any conversions. So if the types aren't equal, the LLSD
// values aren't equal.
@@ -864,6 +878,8 @@ namespace llsd
LLSD& drill(LLSD& blob, const LLSD& rawPath)
{
+ LL_PROFILE_ZONE_SCOPED
+
// Treat rawPath uniformly as an array. If it's not already an array,
// store it as the only entry in one. (But let's say Undefined means an
// empty array.)
@@ -889,6 +905,8 @@ LLSD& drill(LLSD& blob, const LLSD& rawPath)
// path entry that's bad.
for (LLSD::Integer i = 0; i < path.size(); ++i)
{
+ LL_PROFILE_ZONE_NUM( i )
+
const LLSD& key{path[i]};
if (key.isString())
{
@@ -917,6 +935,8 @@ LLSD& drill(LLSD& blob, const LLSD& rawPath)
LLSD drill(const LLSD& blob, const LLSD& path)
{
+ LL_PROFILE_ZONE_SCOPED
+
// non-const drill() does exactly what we want. Temporarily cast away
// const-ness and use that.
return drill(const_cast<LLSD&>(blob), path);
@@ -929,6 +949,8 @@ LLSD drill(const LLSD& blob, const LLSD& path)
// filter may be include to exclude/include keys in a map.
LLSD llsd_clone(LLSD value, LLSD filter)
{
+ LL_PROFILE_ZONE_SCOPED
+
LLSD clone;
bool has_filter(filter.isMap());
diff --git a/indra/llcommon/llsys.cpp b/indra/llcommon/llsys.cpp
index 4e61fb8a58..6d5d043e8d 100644
--- a/indra/llcommon/llsys.cpp
+++ b/indra/llcommon/llsys.cpp
@@ -861,6 +861,7 @@ LLSD LLMemoryInfo::getStatsMap() const
LLMemoryInfo& LLMemoryInfo::refresh()
{
+ LL_PROFILE_ZONE_SCOPED
mStatsMap = loadStatsMap();
LL_DEBUGS("LLMemoryInfo") << "Populated mStatsMap:\n";
diff --git a/indra/llcommon/llthread.cpp b/indra/llcommon/llthread.cpp
index a8cc750437..11f5a015f1 100644
--- a/indra/llcommon/llthread.cpp
+++ b/indra/llcommon/llthread.cpp
@@ -333,6 +333,7 @@ bool LLThread::runCondition(void)
// Stop thread execution if requested until unpaused.
void LLThread::checkPause()
{
+ LL_PROFILE_ZONE_SCOPED
mDataLock->lock();
// This is in a while loop because the pthread API allows for spurious wakeups.
@@ -364,17 +365,20 @@ void LLThread::setQuitting()
// static
LLThread::id_t LLThread::currentID()
{
+ LL_PROFILE_ZONE_SCOPED
return std::this_thread::get_id();
}
// static
void LLThread::yield()
{
+ LL_PROFILE_ZONE_SCOPED
std::this_thread::yield();
}
void LLThread::wake()
{
+ LL_PROFILE_ZONE_SCOPED
mDataLock->lock();
if(!shouldSleep())
{
@@ -385,6 +389,7 @@ void LLThread::wake()
void LLThread::wakeLocked()
{
+ LL_PROFILE_ZONE_SCOPED
if(!shouldSleep())
{
mRunCondition->signal();
@@ -393,11 +398,13 @@ void LLThread::wakeLocked()
void LLThread::lockData()
{
+ LL_PROFILE_ZONE_SCOPED
mDataLock->lock();
}
void LLThread::unlockData()
{
+ LL_PROFILE_ZONE_SCOPED
mDataLock->unlock();
}
diff --git a/indra/llcommon/lluuid.h b/indra/llcommon/lluuid.h
index fe7482ba29..86a396ab06 100644
--- a/indra/llcommon/lluuid.h
+++ b/indra/llcommon/lluuid.h
@@ -184,6 +184,17 @@ struct boost::hash<LLUUID>
}
};
+// Adapt boost hash to std hash
+namespace std
+{
+ template<> struct hash<LLUUID>
+ {
+ std::size_t operator()(LLUUID const& s) const noexcept
+ {
+ return boost::hash<LLUUID>()(s);
+ }
+ };
+}
#endif
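
With the std::hash adapter above, LLUUID can key standard unordered containers directly, reusing the existing boost hash. A brief hypothetical usage sketch (the map and function are invented for the example):

// Hypothetical: the std::hash<LLUUID> specialization lets LLUUID act as a key
// in std:: unordered containers without supplying a custom hasher.
#include <string>
#include <unordered_map>
#include "lluuid.h"

std::unordered_map<LLUUID, std::string> gObjectNames;

void remember_name(const LLUUID& id, const std::string& name)
{
    gObjectNames[id] = name;    // hashing delegates to boost::hash<LLUUID>
}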