author      Andrey Lihatskiy <alihatskiy@productengine.com>    2024-04-10 03:50:44 +0300
committer   Andrey Lihatskiy <alihatskiy@productengine.com>    2024-04-10 03:50:44 +0300
commit      0acee937f55e6d1a198be2549d5cb55a0403dd4d (patch)
tree        2deaa98bcd99fd2860da758a92816c3bdb838acd /indra/llcommon/lltraceaccumulators.cpp
parent      a902138de15067a86a6aeb02fdabd094873da0b2 (diff)
parent      da9a1dcb55548a249ff7a1255f3e518696b81245 (diff)
Merge branch 'main' into marchcat/c-merge
# Conflicts:
#	indra/newview/skins/default/xui/en/floater_inventory_item_properties.xml
Diffstat (limited to 'indra/llcommon/lltraceaccumulators.cpp')
-rw-r--r--    indra/llcommon/lltraceaccumulators.cpp    71
1 file changed, 21 insertions, 50 deletions
diff --git a/indra/llcommon/lltraceaccumulators.cpp b/indra/llcommon/lltraceaccumulators.cpp
index 6bd886ae98..b5b32cba38 100644
--- a/indra/llcommon/lltraceaccumulators.cpp
+++ b/indra/llcommon/lltraceaccumulators.cpp
@@ -1,24 +1,24 @@
-/**
+/**
* @file lltracesampler.cpp
*
* $LicenseInfo:firstyear=2001&license=viewerlgpl$
* Second Life Viewer Source Code
* Copyright (C) 2012, Linden Research, Inc.
- *
+ *
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation;
* version 2.1 of the License only.
- *
+ *
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
- *
+ *
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- *
+ *
* Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
* $/LicenseInfo$
*/
@@ -32,73 +32,52 @@
namespace LLTrace
{
-extern MemStatHandle gTraceMemStat;
-
-
///////////////////////////////////////////////////////////////////////
// AccumulatorBufferGroup
///////////////////////////////////////////////////////////////////////
-AccumulatorBufferGroup::AccumulatorBufferGroup()
+AccumulatorBufferGroup::AccumulatorBufferGroup()
{
- LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
- claim_alloc(gTraceMemStat, mCounts.capacity() * sizeof(CountAccumulator));
- claim_alloc(gTraceMemStat, mSamples.capacity() * sizeof(SampleAccumulator));
- claim_alloc(gTraceMemStat, mEvents.capacity() * sizeof(EventAccumulator));
- claim_alloc(gTraceMemStat, mStackTimers.capacity() * sizeof(TimeBlockAccumulator));
- claim_alloc(gTraceMemStat, mMemStats.capacity() * sizeof(MemAccumulator));
+ LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
}
AccumulatorBufferGroup::AccumulatorBufferGroup(const AccumulatorBufferGroup& other)
: mCounts(other.mCounts),
mSamples(other.mSamples),
mEvents(other.mEvents),
- mStackTimers(other.mStackTimers),
- mMemStats(other.mMemStats)
+ mStackTimers(other.mStackTimers)
{
- LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
- claim_alloc(gTraceMemStat, mCounts.capacity() * sizeof(CountAccumulator));
- claim_alloc(gTraceMemStat, mSamples.capacity() * sizeof(SampleAccumulator));
- claim_alloc(gTraceMemStat, mEvents.capacity() * sizeof(EventAccumulator));
- claim_alloc(gTraceMemStat, mStackTimers.capacity() * sizeof(TimeBlockAccumulator));
- claim_alloc(gTraceMemStat, mMemStats.capacity() * sizeof(MemAccumulator));
+ LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
}
AccumulatorBufferGroup::~AccumulatorBufferGroup()
{
- LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
- disclaim_alloc(gTraceMemStat, mCounts.capacity() * sizeof(CountAccumulator));
- disclaim_alloc(gTraceMemStat, mSamples.capacity() * sizeof(SampleAccumulator));
- disclaim_alloc(gTraceMemStat, mEvents.capacity() * sizeof(EventAccumulator));
- disclaim_alloc(gTraceMemStat, mStackTimers.capacity() * sizeof(TimeBlockAccumulator));
- disclaim_alloc(gTraceMemStat, mMemStats.capacity() * sizeof(MemAccumulator));
+ LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
}
void AccumulatorBufferGroup::handOffTo(AccumulatorBufferGroup& other)
{
- LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
+ LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
other.mCounts.reset(&mCounts);
other.mSamples.reset(&mSamples);
other.mEvents.reset(&mEvents);
other.mStackTimers.reset(&mStackTimers);
- other.mMemStats.reset(&mMemStats);
}
void AccumulatorBufferGroup::makeCurrent()
{
- LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
+ LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
mCounts.makeCurrent();
mSamples.makeCurrent();
mEvents.makeCurrent();
mStackTimers.makeCurrent();
- mMemStats.makeCurrent();
ThreadRecorder* thread_recorder = get_thread_recorder();
AccumulatorBuffer<TimeBlockAccumulator>& timer_accumulator_buffer = mStackTimers;
// update stacktimer parent pointers
for (size_t i = 0, end_i = mStackTimers.size(); i < end_i; i++)
{
- TimeBlockTreeNode* tree_node = thread_recorder->getTimeBlockTreeNode(narrow(i));
+ TimeBlockTreeNode* tree_node = thread_recorder->getTimeBlockTreeNode(narrow<size_t>(i));
if (tree_node)
{
timer_accumulator_buffer[i].mParent = tree_node->mParent;
@@ -109,12 +88,11 @@ void AccumulatorBufferGroup::makeCurrent()
//static
void AccumulatorBufferGroup::clearCurrent()
{
- LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
- AccumulatorBuffer<CountAccumulator>::clearCurrent();
+ LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
+ AccumulatorBuffer<CountAccumulator>::clearCurrent();
AccumulatorBuffer<SampleAccumulator>::clearCurrent();
AccumulatorBuffer<EventAccumulator>::clearCurrent();
AccumulatorBuffer<TimeBlockAccumulator>::clearCurrent();
- AccumulatorBuffer<MemAccumulator>::clearCurrent();
}
bool AccumulatorBufferGroup::isCurrent() const
@@ -124,44 +102,39 @@ bool AccumulatorBufferGroup::isCurrent() const
void AccumulatorBufferGroup::append( const AccumulatorBufferGroup& other )
{
- LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
+ LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
mCounts.addSamples(other.mCounts, SEQUENTIAL);
mSamples.addSamples(other.mSamples, SEQUENTIAL);
mEvents.addSamples(other.mEvents, SEQUENTIAL);
- mMemStats.addSamples(other.mMemStats, SEQUENTIAL);
mStackTimers.addSamples(other.mStackTimers, SEQUENTIAL);
}
void AccumulatorBufferGroup::merge( const AccumulatorBufferGroup& other)
{
- LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
+ LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
mCounts.addSamples(other.mCounts, NON_SEQUENTIAL);
mSamples.addSamples(other.mSamples, NON_SEQUENTIAL);
mEvents.addSamples(other.mEvents, NON_SEQUENTIAL);
- mMemStats.addSamples(other.mMemStats, NON_SEQUENTIAL);
// for now, hold out timers from merge, need to be displayed per thread
//mStackTimers.addSamples(other.mStackTimers, NON_SEQUENTIAL);
}
void AccumulatorBufferGroup::reset(AccumulatorBufferGroup* other)
{
- LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
+ LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
mCounts.reset(other ? &other->mCounts : NULL);
mSamples.reset(other ? &other->mSamples : NULL);
mEvents.reset(other ? &other->mEvents : NULL);
mStackTimers.reset(other ? &other->mStackTimers : NULL);
- mMemStats.reset(other ? &other->mMemStats : NULL);
}
void AccumulatorBufferGroup::sync()
{
- LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
+ LL_PROFILE_ZONE_SCOPED_CATEGORY_STATS;
if (isCurrent())
{
F64SecondsImplicit time_stamp = LLTimer::getTotalSeconds();
-
mSamples.sync(time_stamp);
- mMemStats.sync(time_stamp);
}
}
@@ -197,10 +170,9 @@ F64 SampleAccumulator::mergeSumsOfSquares(const SampleAccumulator& a, const Samp
return a.getSumOfSquares();
}
-
void SampleAccumulator::addSamples( const SampleAccumulator& other, EBufferAppendType append_type )
{
- if (append_type == NON_SEQUENTIAL)
+ if (append_type == NON_SEQUENTIAL)
{
return;
}
@@ -299,7 +271,7 @@ void EventAccumulator::addSamples( const EventAccumulator& other, EBufferAppendT
void EventAccumulator::reset( const EventAccumulator* other )
{
- mNumSamples = 0;
+ mNumSamples = 0;
mSum = 0;
mMin = F32(NaN);
mMax = F32(NaN);
@@ -308,5 +280,4 @@ void EventAccumulator::reset( const EventAccumulator* other )
mLastValue = other ? other->mLastValue : NaN;
}
-
}