author     Richard Linden <none@none>    2013-10-21 14:22:21 -0700
committer  Richard Linden <none@none>    2013-10-21 14:22:21 -0700
commit     1dfba44b3dc14564c99333dedb7a380a160aee44 (patch)
tree       f22164d84706d8d3771632a4eed55ff95e298d7d
parent     54c9aba3d980b6cee4c8025bafe381ea16638d34 (diff)
fixed trace recordings so that they can be read from even while active
-rw-r--r--  indra/llcommon/lltraceaccumulators.cpp    102
-rw-r--r--  indra/llcommon/lltraceaccumulators.h        8
-rw-r--r--  indra/llcommon/lltracerecording.cpp       279
-rw-r--r--  indra/llcommon/lltracerecording.h           2
-rw-r--r--  indra/llcommon/lltracethreadrecorder.cpp    3
-rw-r--r--  indra/llcommon/lltracethreadrecorder.h      2
-rw-r--r--  indra/llcommon/tests/lltrace_test.cpp       5
7 files changed, 299 insertions, 102 deletions
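
The core of the change: a Recording now remembers the AccumulatorBufferGroup it has active on the thread recorder (the new mActiveBuffers member), and every read-side getter first calls update() and then folds the still-accumulating values into the result, instead of requiring the recording to be stopped first. A minimal sketch of that read pattern, assuming the LLCommon types and using hypothetical names (read_sum, closed, in_flight):

// Sketch only: the "closed + in-flight" read used throughout lltracerecording.cpp.
// ACCUMULATOR stands in for CountAccumulator, EventAccumulator, etc.
template <typename ACCUMULATOR>
F64 read_sum(const ACCUMULATOR& closed, const ACCUMULATOR* in_flight)
{
	// values already merged into the recording's own buffers...
	F64 sum = closed.getSum();
	// ...plus whatever the active buffers have accumulated since the last sync
	if (in_flight)
	{
		sum += in_flight->getSum();
	}
	return sum;
}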
diff --git a/indra/llcommon/lltraceaccumulators.cpp b/indra/llcommon/lltraceaccumulators.cpp
index 7d0e63e76a..385d31edd7 100644
--- a/indra/llcommon/lltraceaccumulators.cpp
+++ b/indra/llcommon/lltraceaccumulators.cpp
@@ -155,6 +155,39 @@ void AccumulatorBufferGroup::sync()
}
}
+F64 SampleAccumulator::mergeSumsOfSquares(const SampleAccumulator& a, const SampleAccumulator& b)
+{
+ const F64 epsilon = 0.0000001;
+
+ if (a.getSamplingTime() > epsilon && b.getSamplingTime() > epsilon)
+ {
+ // combine variance (and hence standard deviation) of 2 different sized sample groups using
+ // the following formula: http://www.mrc-bsu.cam.ac.uk/cochrane/handbook/chapter_7/7_7_3_8_combining_groups.htm
+ F64 n_1 = a.getSamplingTime(),
+ n_2 = b.getSamplingTime();
+ F64 m_1 = a.getMean(),
+ m_2 = b.getMean();
+ F64 v_1 = a.getSumOfSquares() / a.getSamplingTime(),
+ v_2 = b.getSumOfSquares() / b.getSamplingTime();
+ if (n_1 < epsilon)
+ {
+ return b.getSumOfSquares();
+ }
+ else
+ {
+ return a.getSamplingTime()
+ * ((((n_1 - epsilon) * v_1)
+ + ((n_2 - epsilon) * v_2)
+ + (((n_1 * n_2) / (n_1 + n_2))
+ * ((m_1 * m_1) + (m_2 * m_2) - (2.f * m_1 * m_2))))
+ / (n_1 + n_2 - epsilon));
+ }
+ }
+
+ return a.getSumOfSquares();
+}
+
+
void SampleAccumulator::addSamples( const SampleAccumulator& other, EBufferAppendType append_type )
{
if (append_type == NON_SEQUENTIAL)
@@ -180,37 +213,8 @@ void SampleAccumulator::addSamples( const SampleAccumulator& other, EBufferAppendType append_type )
if (other.mMin < mMin) { mMin = other.mMin; }
if (other.mMax > mMax) { mMax = other.mMax; }
- F64 epsilon = 0.0000001;
+ mSumOfSquares = mergeSumsOfSquares(*this, other);
- if (other.mTotalSamplingTime > epsilon && mTotalSamplingTime > epsilon)
- {
- // combine variance (and hence standard deviation) of 2 different sized sample groups using
- // the following formula: http://www.mrc-bsu.cam.ac.uk/cochrane/handbook/chapter_7/7_7_3_8_combining_groups.htm
- F64 n_1 = mTotalSamplingTime,
- n_2 = other.mTotalSamplingTime;
- F64 m_1 = mMean,
- m_2 = other.mMean;
- F64 v_1 = mSumOfSquares / mTotalSamplingTime,
- v_2 = other.mSumOfSquares / other.mTotalSamplingTime;
- if (n_1 < epsilon)
- {
- mSumOfSquares = other.mSumOfSquares;
- }
- else
- {
- mSumOfSquares = mTotalSamplingTime
- * ((((n_1 - epsilon) * v_1)
- + ((n_2 - epsilon) * v_2)
- + (((n_1 * n_2) / (n_1 + n_2))
- * ((m_1 * m_1) + (m_2 * m_2) - (2.f * m_1 * m_2))))
- / (n_1 + n_2 - epsilon));
- }
-
- F64 weight = mTotalSamplingTime / (mTotalSamplingTime + other.mTotalSamplingTime);
- mNumSamples += other.mNumSamples;
- mTotalSamplingTime += other.mTotalSamplingTime;
- mMean = (mMean * weight) + (other.mMean * (1.0 - weight));
- }
if (append_type == SEQUENTIAL)
{
mLastValue = other.mLastValue;
@@ -234,6 +238,29 @@ void SampleAccumulator::reset( const SampleAccumulator* other )
mTotalSamplingTime = 0;
}
+F64 EventAccumulator::mergeSumsOfSquares(const EventAccumulator& a, const EventAccumulator& b)
+{
+ if (a.mNumSamples && b.mNumSamples)
+ {
+ // combine variance (and hence standard deviation) of 2 different sized sample groups using
+ // the following formula: http://www.mrc-bsu.cam.ac.uk/cochrane/handbook/chapter_7/7_7_3_8_combining_groups.htm
+ F64 n_1 = a.mNumSamples,
+ n_2 = b.mNumSamples;
+ F64 m_1 = a.mMean,
+ m_2 = b.mMean;
+ F64 v_1 = a.mSumOfSquares / a.mNumSamples,
+ v_2 = b.mSumOfSquares / b.mNumSamples;
+ return (F64)a.mNumSamples
+ * ((((n_1 - 1.f) * v_1)
+ + ((n_2 - 1.f) * v_2)
+ + (((n_1 * n_2) / (n_1 + n_2))
+ * ((m_1 * m_1) + (m_2 * m_2) - (2.f * m_1 * m_2))))
+ / (n_1 + n_2 - 1.f));
+ }
+
+ return a.mSumOfSquares;
+}
+
void EventAccumulator::addSamples( const EventAccumulator& other, EBufferAppendType append_type )
{
if (other.mNumSamples)
@@ -250,20 +277,7 @@ void EventAccumulator::addSamples( const EventAccumulator& other, EBufferAppendType append_type )
if (other.mMin < mMin) { mMin = other.mMin; }
if (other.mMax > mMax) { mMax = other.mMax; }
- // combine variance (and hence standard deviation) of 2 different sized sample groups using
- // the following formula: http://www.mrc-bsu.cam.ac.uk/cochrane/handbook/chapter_7/7_7_3_8_combining_groups.htm
- F64 n_1 = (F64)mNumSamples,
- n_2 = (F64)other.mNumSamples;
- F64 m_1 = mMean,
- m_2 = other.mMean;
- F64 v_1 = mSumOfSquares / mNumSamples,
- v_2 = other.mSumOfSquares / other.mNumSamples;
- mSumOfSquares = (F64)mNumSamples
- * ((((n_1 - 1.f) * v_1)
- + ((n_2 - 1.f) * v_2)
- + (((n_1 * n_2) / (n_1 + n_2))
- * ((m_1 * m_1) + (m_2 * m_2) - (2.f * m_1 * m_2))))
- / (n_1 + n_2 - 1.f));
+ mSumOfSquares = mergeSumsOfSquares(*this, other);
F64 weight = (F64)mNumSamples / (F64)(mNumSamples + other.mNumSamples);
mNumSamples += other.mNumSamples;
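
Both mergeSumsOfSquares() helpers implement the pooled-variance identity from the Cochrane Handbook link in the comments. With n_1, n_2 the two group sizes (sample counts for EventAccumulator, sampling times for SampleAccumulator), m_1, m_2 the group means, and v_i = SS_i / n_i the group variances, the pooled variance is

  v_pooled = [ (n_1 - 1)*v_1 + (n_2 - 1)*v_2 + (n_1*n_2 / (n_1 + n_2)) * (m_1 - m_2)^2 ] / (n_1 + n_2 - 1)

where (m_1 - m_2)^2 expands to the m_1^2 + m_2^2 - 2*m_1*m_2 term that appears in the code; the helper returns n_1 * v_pooled as the merged sum of squares. In the SampleAccumulator overload the small epsilon stands in for the 1, since its n values are sampling times rather than integer counts.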
diff --git a/indra/llcommon/lltraceaccumulators.h b/indra/llcommon/lltraceaccumulators.h
index 2971907849..dfa037d7c0 100644
--- a/indra/llcommon/lltraceaccumulators.h
+++ b/indra/llcommon/lltraceaccumulators.h
@@ -276,6 +276,9 @@ namespace LLTrace
S32 getSampleCount() const { return mNumSamples; }
bool hasValue() const { return mNumSamples > 0; }
+ // helper utility to calculate combined sumofsquares total
+ static F64 mergeSumsOfSquares(const EventAccumulator& a, const EventAccumulator& b);
+
private:
F64 mSum,
mLastValue;
@@ -359,10 +362,13 @@ namespace LLTrace
F64 getMean() const { return mMean; }
F64 getStandardDeviation() const { return sqrtf(mSumOfSquares / mTotalSamplingTime); }
F64 getSumOfSquares() const { return mSumOfSquares; }
- F64SecondsImplicit getSamplingTime() { return mTotalSamplingTime; }
+ F64SecondsImplicit getSamplingTime() const { return mTotalSamplingTime; }
S32 getSampleCount() const { return mNumSamples; }
bool hasValue() const { return mHasValue; }
+ // helper utility to calculate combined sumofsquares total
+ static F64 mergeSumsOfSquares(const SampleAccumulator& a, const SampleAccumulator& b);
+
private:
F64 mSum,
mLastValue;
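
Making the helpers static and public (and getSamplingTime() const) is what lets Recording pool a closed accumulator with a still-active one. A usage sketch with hypothetical names (combined_sd, closed, active), assuming lltraceaccumulators.h and <cmath> are included; it mirrors what Recording::getStandardDeviation() does below:

// Sketch: combined standard deviation across a closed and an in-flight accumulator.
F64 combined_sd(const LLTrace::SampleAccumulator& closed,
                const LLTrace::SampleAccumulator& active)
{
	F64 sum_of_squares = LLTrace::SampleAccumulator::mergeSumsOfSquares(closed, active);
	F64 total_time     = closed.getSamplingTime().value() + active.getSamplingTime().value();
	return sqrt(sum_of_squares / total_time);
}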
diff --git a/indra/llcommon/lltracerecording.cpp b/indra/llcommon/lltracerecording.cpp
index 5ec7ce56b9..0fd0053240 100644
--- a/indra/llcommon/lltracerecording.cpp
+++ b/indra/llcommon/lltracerecording.cpp
@@ -32,6 +32,11 @@
#include "lltracethreadrecorder.h"
#include "llthread.h"
+inline F64 lerp(F64 a, F64 b, F64 u)
+{
+ return a + ((b - a) * u);
+}
+
namespace LLTrace
{
@@ -43,7 +48,7 @@ extern MemStatHandle gTraceMemStat;
Recording::Recording(EPlayState state)
: mElapsedSeconds(0),
- mInHandOff(false)
+ mActiveBuffers(NULL)
{
claim_alloc(gTraceMemStat, this);
mBuffers = new AccumulatorBufferGroup();
@@ -88,13 +93,20 @@ Recording::~Recording()
}
}
+// brings recording to front of recorder stack, with up to date info
void Recording::update()
{
if (isStarted())
{
mElapsedSeconds += mSamplingTimer.getElapsedTimeF64();
- AccumulatorBufferGroup* buffers = mBuffers.write();
- LLTrace::get_thread_recorder()->bringUpToDate(buffers);
+
+ llassert(mActiveBuffers);
+ if(!mActiveBuffers->isCurrent())
+ {
+ AccumulatorBufferGroup* buffers = mBuffers.write();
+ LLTrace::get_thread_recorder()->deactivate(buffers);
+ mActiveBuffers = LLTrace::get_thread_recorder()->activate(buffers);
+ }
mSamplingTimer.reset();
}
@@ -112,20 +124,19 @@ void Recording::handleStart()
{
mSamplingTimer.reset();
mBuffers.setStayUnique(true);
- LLTrace::get_thread_recorder()->activate(mBuffers.write(), mInHandOff);
- mInHandOff = false;
+ mActiveBuffers = LLTrace::get_thread_recorder()->activate(mBuffers.write());
}
void Recording::handleStop()
{
mElapsedSeconds += mSamplingTimer.getElapsedTimeF64();
LLTrace::get_thread_recorder()->deactivate(mBuffers.write());
+ mActiveBuffers = NULL;
mBuffers.setStayUnique(false);
}
void Recording::handleSplitTo(Recording& other)
{
- other.mInHandOff = true;
mBuffers.write()->handOffTo(*other.mBuffers.write());
}
@@ -139,214 +150,378 @@ void Recording::appendRecording( Recording& other )
bool Recording::hasValue(const StatType<TimeBlockAccumulator>& stat)
{
- return mBuffers->mStackTimers[stat.getIndex()].hasValue();
+ update();
+ const TimeBlockAccumulator& accumulator = mBuffers->mStackTimers[stat.getIndex()];
+ const TimeBlockAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mStackTimers[stat.getIndex()] : NULL;
+ return accumulator.hasValue() || (active_accumulator && active_accumulator->hasValue());
}
F64Seconds Recording::getSum(const StatType<TimeBlockAccumulator>& stat)
{
+ update();
const TimeBlockAccumulator& accumulator = mBuffers->mStackTimers[stat.getIndex()];
- return F64Seconds((F64)(accumulator.mTotalTimeCounter)
- / (F64)LLTrace::BlockTimerStatHandle::countsPerSecond());
+ const TimeBlockAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mStackTimers[stat.getIndex()] : NULL;
+ return F64Seconds((F64)(accumulator.mTotalTimeCounter) + (F64)(active_accumulator ? active_accumulator->mTotalTimeCounter : 0))
+ / (F64)LLTrace::BlockTimerStatHandle::countsPerSecond();
}
F64Seconds Recording::getSum(const StatType<TimeBlockAccumulator::SelfTimeFacet>& stat)
{
+ update();
const TimeBlockAccumulator& accumulator = mBuffers->mStackTimers[stat.getIndex()];
- return F64Seconds((F64)(accumulator.mSelfTimeCounter) / (F64)LLTrace::BlockTimerStatHandle::countsPerSecond());
+ const TimeBlockAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mStackTimers[stat.getIndex()] : NULL;
+ return F64Seconds((F64)(accumulator.mSelfTimeCounter) + (F64)(active_accumulator ? active_accumulator->mSelfTimeCounter : 0) / (F64)LLTrace::BlockTimerStatHandle::countsPerSecond());
}
S32 Recording::getSum(const StatType<TimeBlockAccumulator::CallCountFacet>& stat)
{
- return mBuffers->mStackTimers[stat.getIndex()].mCalls;
+ update();
+ const TimeBlockAccumulator& accumulator = mBuffers->mStackTimers[stat.getIndex()];
+ const TimeBlockAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mStackTimers[stat.getIndex()] : NULL;
+ return accumulator.mCalls + (active_accumulator ? active_accumulator->mCalls : 0);
}
F64Seconds Recording::getPerSec(const StatType<TimeBlockAccumulator>& stat)
{
+ update();
const TimeBlockAccumulator& accumulator = mBuffers->mStackTimers[stat.getIndex()];
+ const TimeBlockAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mStackTimers[stat.getIndex()] : NULL;
- return F64Seconds((F64)(accumulator.mTotalTimeCounter)
+ return F64Seconds((F64)(accumulator.mTotalTimeCounter + (active_accumulator ? active_accumulator->mTotalTimeCounter : 0))
/ ((F64)LLTrace::BlockTimerStatHandle::countsPerSecond() * mElapsedSeconds.value()));
}
F64Seconds Recording::getPerSec(const StatType<TimeBlockAccumulator::SelfTimeFacet>& stat)
{
+ update();
const TimeBlockAccumulator& accumulator = mBuffers->mStackTimers[stat.getIndex()];
+ const TimeBlockAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mStackTimers[stat.getIndex()] : NULL;
- return F64Seconds((F64)(accumulator.mSelfTimeCounter)
+ return F64Seconds((F64)(accumulator.mSelfTimeCounter + (active_accumulator ? active_accumulator->mSelfTimeCounter : 0))
/ ((F64)LLTrace::BlockTimerStatHandle::countsPerSecond() * mElapsedSeconds.value()));
}
F32 Recording::getPerSec(const StatType<TimeBlockAccumulator::CallCountFacet>& stat)
{
- return (F32)mBuffers->mStackTimers[stat.getIndex()].mCalls / mElapsedSeconds.value();
+ update();
+ const TimeBlockAccumulator& accumulator = mBuffers->mStackTimers[stat.getIndex()];
+ const TimeBlockAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mStackTimers[stat.getIndex()] : NULL;
+ return (F32)(accumulator.mCalls + (active_accumulator ? active_accumulator->mCalls : 0)) / mElapsedSeconds.value();
}
bool Recording::hasValue(const StatType<MemAccumulator>& stat)
{
- return mBuffers->mMemStats[stat.getIndex()].mSize.hasValue();
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ return accumulator.mSize.hasValue() || (active_accumulator && active_accumulator->mSize.hasValue() ? active_accumulator->mSize.hasValue() : false);
}
F64Kilobytes Recording::getMin(const StatType<MemAccumulator>& stat)
{
- return F64Bytes(mBuffers->mMemStats[stat.getIndex()].mSize.getMin());
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ return F64Bytes(llmin(accumulator.mSize.getMin(), (active_accumulator && active_accumulator->mSize.hasValue() ? active_accumulator->mSize.getMin() : F32_MAX)));
}
F64Kilobytes Recording::getMean(const StatType<MemAccumulator>& stat)
{
- return F64Bytes(mBuffers->mMemStats[stat.getIndex()].mSize.getMean());
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+
+ if (active_accumulator && active_accumulator->mSize.hasValue())
+ {
+ return F64Bytes(lerp(accumulator.mSize.getMean(), active_accumulator->mSize.getMean(), active_accumulator->mSize.getSampleCount() / (accumulator.mSize.getSampleCount() + active_accumulator->mSize.getSampleCount())));
+ }
+ else
+ {
+ return F64Bytes(accumulator.mSize.getMean());
+ }
}
F64Kilobytes Recording::getMax(const StatType<MemAccumulator>& stat)
{
- return F64Bytes(mBuffers->mMemStats[stat.getIndex()].mSize.getMax());
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ return F64Bytes(llmax(accumulator.mSize.getMax(), active_accumulator && active_accumulator->mSize.hasValue() ? active_accumulator->mSize.getMax() : F32_MIN));
}
F64Kilobytes Recording::getStandardDeviation(const StatType<MemAccumulator>& stat)
{
- return F64Bytes(mBuffers->mMemStats[stat.getIndex()].mSize.getStandardDeviation());
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ if (active_accumulator && active_accumulator->hasValue())
+ {
+ F64 sum_of_squares = SampleAccumulator::mergeSumsOfSquares(accumulator.mSize, active_accumulator->mSize);
+ return F64Bytes(sqrtf(sum_of_squares / (accumulator.mSize.getSamplingTime().value() + active_accumulator->mSize.getSamplingTime().value())));
+ }
+ else
+ {
+ return F64Bytes(accumulator.mSize.getStandardDeviation());
+ }
}
F64Kilobytes Recording::getLastValue(const StatType<MemAccumulator>& stat)
{
- return F64Bytes(mBuffers->mMemStats[stat.getIndex()].mSize.getLastValue());
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ return F64Bytes(active_accumulator ? active_accumulator->mSize.getLastValue() : accumulator.mSize.getLastValue());
}
bool Recording::hasValue(const StatType<MemAccumulator::AllocationFacet>& stat)
{
- return mBuffers->mMemStats[stat.getIndex()].mAllocations.hasValue();
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ return accumulator.mAllocations.hasValue() || (active_accumulator ? active_accumulator->mAllocations.hasValue() : false);
}
F64Kilobytes Recording::getSum(const StatType<MemAccumulator::AllocationFacet>& stat)
{
- return F64Bytes(mBuffers->mMemStats[stat.getIndex()].mAllocations.getSum());
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ return F64Bytes(accumulator.mAllocations.getSum() + (active_accumulator ? active_accumulator->mAllocations.getSum() : 0));
}
F64Kilobytes Recording::getPerSec(const StatType<MemAccumulator::AllocationFacet>& stat)
{
- return F64Bytes(mBuffers->mMemStats[stat.getIndex()].mAllocations.getSum() / mElapsedSeconds.value());
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ return F64Bytes((accumulator.mAllocations.getSum() + (active_accumulator ? active_accumulator->mAllocations.getSum() : 0)) / mElapsedSeconds.value());
}
S32 Recording::getSampleCount(const StatType<MemAccumulator::AllocationFacet>& stat)
{
- return mBuffers->mMemStats[stat.getIndex()].mAllocations.getSampleCount();
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ return accumulator.mAllocations.getSampleCount() + (active_accumulator ? active_accumulator->mAllocations.getSampleCount() : 0);
}
bool Recording::hasValue(const StatType<MemAccumulator::DeallocationFacet>& stat)
{
- return mBuffers->mMemStats[stat.getIndex()].mDeallocations.hasValue();
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ return accumulator.mDeallocations.hasValue() || (active_accumulator ? active_accumulator->mDeallocations.hasValue() : false);
}
F64Kilobytes Recording::getSum(const StatType<MemAccumulator::DeallocationFacet>& stat)
{
- return F64Bytes(mBuffers->mMemStats[stat.getIndex()].mDeallocations.getSum());
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ return F64Bytes(accumulator.mDeallocations.getSum() + (active_accumulator ? active_accumulator->mDeallocations.getSum() : 0));
}
F64Kilobytes Recording::getPerSec(const StatType<MemAccumulator::DeallocationFacet>& stat)
{
- return F64Bytes(mBuffers->mMemStats[stat.getIndex()].mDeallocations.getSum() / mElapsedSeconds.value());
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ return F64Bytes((accumulator.mDeallocations.getSum() + (active_accumulator ? active_accumulator->mDeallocations.getSum() : 0)) / mElapsedSeconds.value());
}
S32 Recording::getSampleCount(const StatType<MemAccumulator::DeallocationFacet>& stat)
{
- return mBuffers->mMemStats[stat.getIndex()].mDeallocations.getSampleCount();
+ update();
+ const MemAccumulator& accumulator = mBuffers->mMemStats[stat.getIndex()];
+ const MemAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mMemStats[stat.getIndex()] : NULL;
+ return accumulator.mDeallocations.getSampleCount() + (active_accumulator ? active_accumulator->mDeallocations.getSampleCount() : 0);
}
bool Recording::hasValue(const StatType<CountAccumulator>& stat)
{
- return mBuffers->mCounts[stat.getIndex()].hasValue();
+ update();
+ const CountAccumulator& accumulator = mBuffers->mCounts[stat.getIndex()];
+ const CountAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mCounts[stat.getIndex()] : NULL;
+ return accumulator.hasValue() || (active_accumulator ? active_accumulator->hasValue() : false);
}
F64 Recording::getSum(const StatType<CountAccumulator>& stat)
{
- return mBuffers->mCounts[stat.getIndex()].getSum();
-}
-
-F64 Recording::getSum( const StatType<EventAccumulator>& stat)
-{
- return (F64)mBuffers->mEvents[stat.getIndex()].getSum();
+ update();
+ const CountAccumulator& accumulator = mBuffers->mCounts[stat.getIndex()];
+ const CountAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mCounts[stat.getIndex()] : NULL;
+ return accumulator.getSum() + (active_accumulator ? active_accumulator->getSum() : 0);
}
F64 Recording::getPerSec( const StatType<CountAccumulator>& stat )
{
- F64 sum = mBuffers->mCounts[stat.getIndex()].getSum();
- return sum / mElapsedSeconds.value();
+ update();
+ const CountAccumulator& accumulator = mBuffers->mCounts[stat.getIndex()];
+ const CountAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mCounts[stat.getIndex()] : NULL;
+ F64 sum = accumulator.getSum() + (active_accumulator ? active_accumulator->getSum() : 0);
+ return sum / mElapsedSeconds.value();
}
S32 Recording::getSampleCount( const StatType<CountAccumulator>& stat )
{
- return mBuffers->mCounts[stat.getIndex()].getSampleCount();
+ update();
+ const CountAccumulator& accumulator = mBuffers->mCounts[stat.getIndex()];
+ const CountAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mCounts[stat.getIndex()] : NULL;
+ return accumulator.getSampleCount() + (active_accumulator ? active_accumulator->getSampleCount() : 0);
}
bool Recording::hasValue(const StatType<SampleAccumulator>& stat)
{
- return mBuffers->mSamples[stat.getIndex()].hasValue();
+ update();
+ const SampleAccumulator& accumulator = mBuffers->mSamples[stat.getIndex()];
+ const SampleAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mSamples[stat.getIndex()] : NULL;
+ return accumulator.hasValue() || (active_accumulator && active_accumulator->hasValue());
}
F64 Recording::getMin( const StatType<SampleAccumulator>& stat )
{
- return mBuffers->mSamples[stat.getIndex()].getMin();
+ update();
+ const SampleAccumulator& accumulator = mBuffers->mSamples[stat.getIndex()];
+ const SampleAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mSamples[stat.getIndex()] : NULL;
+ return llmin(accumulator.getMin(), active_accumulator && active_accumulator->hasValue() ? active_accumulator->getMin() : F32_MAX);
}
F64 Recording::getMax( const StatType<SampleAccumulator>& stat )
{
- return mBuffers->mSamples[stat.getIndex()].getMax();
+ update();
+ const SampleAccumulator& accumulator = mBuffers->mSamples[stat.getIndex()];
+ const SampleAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mSamples[stat.getIndex()] : NULL;
+ return llmax(accumulator.getMax(), active_accumulator && active_accumulator->hasValue() ? active_accumulator->getMax() : F32_MIN);
}
F64 Recording::getMean( const StatType<SampleAccumulator>& stat )
{
- return mBuffers->mSamples[stat.getIndex()].getMean();
+ update();
+ const SampleAccumulator& accumulator = mBuffers->mSamples[stat.getIndex()];
+ const SampleAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mSamples[stat.getIndex()] : NULL;
+ if (active_accumulator && active_accumulator->hasValue())
+ {
+ return lerp(accumulator.getMean(), active_accumulator->getMean(), active_accumulator->getSampleCount() / (accumulator.getSampleCount() + active_accumulator->getSampleCount()));
+ }
+ else
+ {
+ return accumulator.getMean();
+ }
}
F64 Recording::getStandardDeviation( const StatType<SampleAccumulator>& stat )
{
- return mBuffers->mSamples[stat.getIndex()].getStandardDeviation();
+ update();
+ const SampleAccumulator& accumulator = mBuffers->mSamples[stat.getIndex()];
+ const SampleAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mSamples[stat.getIndex()] : NULL;
+
+ if (active_accumulator && active_accumulator->hasValue())
+ {
+ F64 sum_of_squares = SampleAccumulator::mergeSumsOfSquares(accumulator, *active_accumulator);
+ return sqrtf(sum_of_squares / (accumulator.getSamplingTime() + active_accumulator->getSamplingTime()));
+ }
+ else
+ {
+ return accumulator.getStandardDeviation();
+ }
}
F64 Recording::getLastValue( const StatType<SampleAccumulator>& stat )
{
- return mBuffers->mSamples[stat.getIndex()].getLastValue();
+ update();
+ const SampleAccumulator& accumulator = mBuffers->mSamples[stat.getIndex()];
+ const SampleAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mSamples[stat.getIndex()] : NULL;
+ return (active_accumulator && active_accumulator->hasValue() ? active_accumulator->getLastValue() : accumulator.getLastValue());
}
S32 Recording::getSampleCount( const StatType<SampleAccumulator>& stat )
{
- return mBuffers->mSamples[stat.getIndex()].getSampleCount();
+ update();
+ const SampleAccumulator& accumulator = mBuffers->mSamples[stat.getIndex()];
+ const SampleAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mSamples[stat.getIndex()] : NULL;
+ return accumulator.getSampleCount() + (active_accumulator && active_accumulator->hasValue() ? active_accumulator->getSampleCount() : 0);
}
bool Recording::hasValue(const StatType<EventAccumulator>& stat)
{
- return mBuffers->mEvents[stat.getIndex()].hasValue();
+ update();
+ const EventAccumulator& accumulator = mBuffers->mEvents[stat.getIndex()];
+ const EventAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mEvents[stat.getIndex()] : NULL;
+ return accumulator.hasValue() || (active_accumulator && active_accumulator->hasValue());
+}
+
+F64 Recording::getSum( const StatType<EventAccumulator>& stat)
+{
+ update();
+ const EventAccumulator& accumulator = mBuffers->mEvents[stat.getIndex()];
+ const EventAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mEvents[stat.getIndex()] : NULL;
+ return (F64)(accumulator.getSum() + (active_accumulator && active_accumulator->hasValue() ? active_accumulator->getSum() : 0));
}
F64 Recording::getMin( const StatType<EventAccumulator>& stat )
{
- return mBuffers->mEvents[stat.getIndex()].getMin();
+ update();
+ const EventAccumulator& accumulator = mBuffers->mEvents[stat.getIndex()];
+ const EventAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mEvents[stat.getIndex()] : NULL;
+ return llmin(accumulator.getMin(), active_accumulator && active_accumulator->hasValue() ? active_accumulator->getMin() : F32_MAX);
}
F64 Recording::getMax( const StatType<EventAccumulator>& stat )
{
- return mBuffers->mEvents[stat.getIndex()].getMax();
+ update();
+ const EventAccumulator& accumulator = mBuffers->mEvents[stat.getIndex()];
+ const EventAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mEvents[stat.getIndex()] : NULL;
+ return llmax(accumulator.getMax(), active_accumulator && active_accumulator->hasValue() ? active_accumulator->getMax() : F32_MIN);
}
F64 Recording::getMean( const StatType<EventAccumulator>& stat )
{
- return mBuffers->mEvents[stat.getIndex()].getMean();
+ update();
+ const EventAccumulator& accumulator = mBuffers->mEvents[stat.getIndex()];
+ const EventAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mEvents[stat.getIndex()] : NULL;
+ if (active_accumulator && active_accumulator->hasValue())
+ {
+ return lerp(accumulator.getMean(), active_accumulator->getMean(), active_accumulator->getSampleCount() / (accumulator.getSampleCount() + active_accumulator->getSampleCount()));
+ }
+ else
+ {
+ return accumulator.getMean();
+ }
}
F64 Recording::getStandardDeviation( const StatType<EventAccumulator>& stat )
{
- return mBuffers->mEvents[stat.getIndex()].getStandardDeviation();
+ update();
+ const EventAccumulator& accumulator = mBuffers->mEvents[stat.getIndex()];
+ const EventAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mEvents[stat.getIndex()] : NULL;
+
+ if (active_accumulator && active_accumulator->hasValue())
+ {
+ F64 sum_of_squares = EventAccumulator::mergeSumsOfSquares(accumulator, *active_accumulator);
+ return sqrtf(sum_of_squares / (accumulator.getSampleCount() + active_accumulator->getSampleCount()));
+ }
+ else
+ {
+ return accumulator.getStandardDeviation();
+ }
}
F64 Recording::getLastValue( const StatType<EventAccumulator>& stat )
{
- return mBuffers->mEvents[stat.getIndex()].getLastValue();
+ update();
+ const EventAccumulator& accumulator = mBuffers->mEvents[stat.getIndex()];
+ const EventAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mEvents[stat.getIndex()] : NULL;
+ return active_accumulator ? active_accumulator->getLastValue() : accumulator.getLastValue();
}
S32 Recording::getSampleCount( const StatType<EventAccumulator>& stat )
{
- return mBuffers->mEvents[stat.getIndex()].getSampleCount();
+ update();
+ const EventAccumulator& accumulator = mBuffers->mEvents[stat.getIndex()];
+ const EventAccumulator* active_accumulator = mActiveBuffers ? &mActiveBuffers->mEvents[stat.getIndex()] : NULL;
+ return accumulator.getSampleCount() + (active_accumulator ? active_accumulator->getSampleCount() : 0);
}
///////////////////////////////////////////////////////////////////////
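
The file-local lerp() added at the top of lltracerecording.cpp is what the getMean() overloads use to blend the closed mean with the in-flight mean: lerp(m_1, m_2, w) with w = n_2 / (n_1 + n_2) is just the weighted average (n_1*m_1 + n_2*m_2) / (n_1 + n_2). A sketch of that blend with the count ratio taken in floating point (blended_mean is a hypothetical name; lerp() is the helper defined above):

// Sketch: the blended mean as a weighted average; counts are widened to F64
// before dividing so the weight is not truncated by integer division.
inline F64 blended_mean(F64 closed_mean, S32 closed_count, F64 active_mean, S32 active_count)
{
	F64 w = (F64)active_count / (F64)(closed_count + active_count);
	return lerp(closed_mean, active_mean, w); // == (n1*m1 + n2*m2) / (n1 + n2)
}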
diff --git a/indra/llcommon/lltracerecording.h b/indra/llcommon/lltracerecording.h
index b045aafa11..93ac276e33 100644
--- a/indra/llcommon/lltracerecording.h
+++ b/indra/llcommon/lltracerecording.h
@@ -322,7 +322,7 @@ namespace LLTrace
LLTimer mSamplingTimer;
F64Seconds mElapsedSeconds;
LLCopyOnWritePointer<AccumulatorBufferGroup> mBuffers;
- bool mInHandOff;
+ AccumulatorBufferGroup* mActiveBuffers;
};
diff --git a/indra/llcommon/lltracethreadrecorder.cpp b/indra/llcommon/lltracethreadrecorder.cpp
index 7b7da5343d..a70e94e4b1 100644
--- a/indra/llcommon/lltracethreadrecorder.cpp
+++ b/indra/llcommon/lltracethreadrecorder.cpp
@@ -131,7 +131,7 @@ TimeBlockTreeNode* ThreadRecorder::getTimeBlockTreeNode( S32 index )
}
-void ThreadRecorder::activate( AccumulatorBufferGroup* recording, bool from_handoff )
+AccumulatorBufferGroup* ThreadRecorder::activate( AccumulatorBufferGroup* recording)
{
ActiveRecording* active_recording = new ActiveRecording(recording);
if (!mActiveRecordings.empty())
@@ -144,6 +144,7 @@ void ThreadRecorder::activate( AccumulatorBufferGroup* recording, bool from_handoff )
mActiveRecordings.push_back(active_recording);
mActiveRecordings.back()->mPartialRecording.makeCurrent();
+ return &active_recording->mPartialRecording;
}
ThreadRecorder::active_recording_list_t::iterator ThreadRecorder::bringUpToDate( AccumulatorBufferGroup* recording )
diff --git a/indra/llcommon/lltracethreadrecorder.h b/indra/llcommon/lltracethreadrecorder.h
index c6afcdac80..d30fa15ea7 100644
--- a/indra/llcommon/lltracethreadrecorder.h
+++ b/indra/llcommon/lltracethreadrecorder.h
@@ -47,7 +47,7 @@ namespace LLTrace
~ThreadRecorder();
- void activate(AccumulatorBufferGroup* recording, bool from_handoff = false);
+ AccumulatorBufferGroup* activate(AccumulatorBufferGroup* recording);
void deactivate(AccumulatorBufferGroup* recording);
active_recording_list_t::iterator bringUpToDate(AccumulatorBufferGroup* recording);
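
activate() now returns the AccumulatorBufferGroup that actually receives new samples, which is the pointer a Recording keeps in mActiveBuffers. A condensed sketch of the caller-side pairing (cf. Recording::handleStart() and handleStop() above):

// Sketch: keep the pointer returned by activate() for in-flight reads,
// and clear it again when the recording is deactivated.
mActiveBuffers = LLTrace::get_thread_recorder()->activate(mBuffers.write());
// ... later, when stopping:
LLTrace::get_thread_recorder()->deactivate(mBuffers.write());
mActiveBuffers = NULL;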
diff --git a/indra/llcommon/tests/lltrace_test.cpp b/indra/llcommon/tests/lltrace_test.cpp
index 8ce509699d..0a9d85ad00 100644
--- a/indra/llcommon/tests/lltrace_test.cpp
+++ b/indra/llcommon/tests/lltrace_test.cpp
@@ -109,8 +109,9 @@ namespace tut
at_work.stop();
drink_coffee(1, S32VentiCup(1));
}
- after_3pm.stop();
- all_day.stop();
+ // don't need to stop recordings to get accurate values out of them
+ //after_3pm.stop();
+ //all_day.stop();
ensure("count stats are counted when recording is active",
at_work.getSum(sCupsOfCoffeeConsumed) == 3