| author | Richard Linden <none@none> | 2014-02-06 11:27:16 -0800 | 
|---|---|---|
| committer | Richard Linden <none@none> | 2014-02-06 11:27:16 -0800 | 
| commit | 3040b429a3b136b87ddb0ae88ccfa3a7aa71e232 (patch) | |
| tree | 46bf70fda640f925c21d519a41a0d7ebd94c56a0 /indra/llcommon | |
| parent | 2ab11c8e34f2349500aef76cd8372ca889020728 (diff) | |
added LL_TRACE_ENABLED to allow disabling of lltrace
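The change introduces a single compile-time switch: every LLTrace entry point keeps its signature, but its body is wrapped in `#if LL_TRACE_ENABLED`. Below is a minimal, self-contained sketch of that pattern; the names (`record_sample`, `frame_time_ms`) are illustrative and not part of the viewer source. Note that this commit hard-codes the flag to 1 in lltrace.h, so actually turning tracing off would mean editing that define or overriding it from the build system, which this commit does not do.

```cpp
#include <cstdio>

// lltrace.h hard-codes this to 1 in this commit; a build that wants tracing
// off would have to define it to 0 instead.
#define LL_TRACE_ENABLED 1

// Illustrative stand-in for a trace entry point such as LLTrace::record():
// the signature is always available, but the body compiles away when the
// flag is 0, so call sites need no changes.
inline void record_sample(const char* name, double value)
{
#if LL_TRACE_ENABLED
	std::printf("sample %s = %f\n", name, value); // real code updates an accumulator
#endif
}

int main()
{
	record_sample("frame_time_ms", 16.6);
	return 0;
}
```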
Diffstat (limited to 'indra/llcommon')
| -rwxr-xr-x | indra/llcommon/llfasttimer.cpp | 4 |
| -rwxr-xr-x | indra/llcommon/llmemory.h | 2 |
| -rw-r--r-- | indra/llcommon/lltrace.h | 40 |
| -rw-r--r-- | indra/llcommon/lltracerecording.cpp | 12 |
| -rw-r--r-- | indra/llcommon/lltracethreadrecorder.cpp | 26 |
5 files changed, 82 insertions, 2 deletions
```diff
diff --git a/indra/llcommon/llfasttimer.cpp b/indra/llcommon/llfasttimer.cpp
index 9b093e8936..d46e257af7 100755
--- a/indra/llcommon/llfasttimer.cpp
+++ b/indra/llcommon/llfasttimer.cpp
@@ -294,6 +294,7 @@ static LLTrace::BlockTimerStatHandle FTM_PROCESS_TIMES("Process FastTimer Times"
 //static
 void BlockTimer::processTimes()
 {
+#if LL_TRACE_ENABLED
 	LL_RECORD_BLOCK_TIME(FTM_PROCESS_TIMES);
 
 	get_clock_count(); // good place to calculate clock frequency
@@ -316,6 +317,7 @@ void BlockTimer::processTimes()
 		accumulator.mLastCaller = NULL;
 		accumulator.mMoveUpTree = false;
 	}
+#endif
 }
 
 std::vector<BlockTimerStatHandle*>::iterator BlockTimerStatHandle::beginChildren()
@@ -449,6 +451,7 @@ TimeBlockAccumulator::TimeBlockAccumulator()
 
 void TimeBlockAccumulator::addSamples( const TimeBlockAccumulator& other, EBufferAppendType append_type )
 {
+#if LL_TRACE_ENABLED
 	// we can't merge two unrelated time block samples, as that will screw with the nested timings
 	// due to the call hierarchy of each thread
 	llassert(append_type == SEQUENTIAL);
@@ -459,6 +462,7 @@ void TimeBlockAccumulator::addSamples( const TimeBlockAccumulator& other, EBuffe
 	mActiveCount = other.mActiveCount;
 	mMoveUpTree = other.mMoveUpTree;
 	mParent = other.mParent;
+#endif
 }
 
 void TimeBlockAccumulator::reset( const TimeBlockAccumulator* other )
diff --git a/indra/llcommon/llmemory.h b/indra/llcommon/llmemory.h
index 3de59350db..1aa095e71c 100755
--- a/indra/llcommon/llmemory.h
+++ b/indra/llcommon/llmemory.h
@@ -37,7 +37,7 @@ class LLMutex ;
 
 #if LL_WINDOWS && LL_DEBUG
 #define LL_CHECK_MEMORY llassert(_CrtCheckMemory());
 #else
-#define LL_CHECK_MEMORY
+#define LL_CHECK_MEMORY _CrtCheckMemory();
 #endif
 
diff --git a/indra/llcommon/lltrace.h b/indra/llcommon/lltrace.h
index 7e811efe71..5f1289dad8 100644
--- a/indra/llcommon/lltrace.h
+++ b/indra/llcommon/lltrace.h
@@ -38,6 +38,8 @@
 #include "llpointer.h"
 #include "llunits.h"
 
+#define LL_TRACE_ENABLED 1
+
 namespace LLTrace
 {
 class Recording;
@@ -135,8 +137,10 @@ public:
 template<typename T, typename VALUE_T>
 void record(EventStatHandle<T>& measurement, VALUE_T value)
 {
+#if LL_TRACE_ENABLED
 	T converted_value(value);
 	measurement.getCurrentAccumulator().record(storage_value(converted_value));
+#endif
 }
 
 template <typename T = F64>
@@ -158,8 +162,10 @@ public:
 template<typename T, typename VALUE_T>
 void sample(SampleStatHandle<T>& measurement, VALUE_T value)
 {
+#if LL_TRACE_ENABLED
 	T converted_value(value);
 	measurement.getCurrentAccumulator().sample(storage_value(converted_value));
+#endif
 }
 
 template <typename T = F64>
@@ -181,8 +187,10 @@ public:
 template<typename T, typename VALUE_T>
 void add(CountStatHandle<T>& count, VALUE_T value)
 {
+#if LL_TRACE_ENABLED
 	T converted_value(value);
 	count.getCurrentAccumulator().add(storage_value(converted_value));
+#endif
 }
 
 template<>
@@ -323,21 +331,25 @@ struct MeasureMem<std::basic_string<T>, IS_MEM_TRACKABLE, IS_BYTES>
 template<typename T>
 inline void claim_alloc(MemStatHandle& measurement, const T& value)
 {
+#if LL_TRACE_ENABLED
 	S32 size = MeasureMem<T>::measureFootprint(value);
 	if(size == 0) return;
 	MemAccumulator& accumulator = measurement.getCurrentAccumulator();
 	accumulator.mSize.sample(accumulator.mSize.hasValue() ? accumulator.mSize.getLastValue() + (F64)size : (F64)size);
 	accumulator.mAllocations.record(size);
+#endif
 }
 
 template<typename T>
 inline void disclaim_alloc(MemStatHandle& measurement, const T& value)
 {
+#if LL_TRACE_ENABLED
 	S32 size = MeasureMem<T>::measureFootprint(value);
 	if(size == 0) return;
 	MemAccumulator& accumulator = measurement.getCurrentAccumulator();
 	accumulator.mSize.sample(accumulator.mSize.hasValue() ? accumulator.mSize.getLastValue() - (F64)size : -(F64)size);
 	accumulator.mDeallocations.add(size);
+#endif
 }
 
 template<typename DERIVED, size_t ALIGNMENT = LL_DEFAULT_HEAP_ALIGN>
@@ -347,16 +359,21 @@ public:
 	typedef void mem_trackable_tag_t;
 
 	MemTrackableNonVirtual(const char* name)
+#if LL_TRACE_ENABLED
 	:	mMemFootprint(0)
+#endif
 	{
+#if LL_TRACE_ENABLED
 		static bool name_initialized = false;
 		if (!name_initialized)
 		{
 			name_initialized = true;
 			sMemStat.setName(name);
 		}
+#endif
 	}
 
+#if LL_TRACE_ENABLED
 	~MemTrackableNonVirtual()
 	{
 		disclaimMem(mMemFootprint);
@@ -368,43 +385,55 @@ public:
 	}
 
 	S32 getMemFootprint() const	{ return mMemFootprint; }
+#endif
 
 	void* operator new(size_t size) 
 	{
+#if LL_TRACE_ENABLED
 		claim_alloc(sMemStat, size);
+#endif
 		return ll_aligned_malloc<ALIGNMENT>(size);
 	}
 
 	template<int CUSTOM_ALIGNMENT>
 	static void* aligned_new(size_t size)
 	{
+#if LL_TRACE_ENABLED
 		claim_alloc(sMemStat, size);
+#endif
 		return ll_aligned_malloc<CUSTOM_ALIGNMENT>(size);
 	}
 
 	void operator delete(void* ptr, size_t size)
 	{
+#if LL_TRACE_ENABLED
 		disclaim_alloc(sMemStat, size);
+#endif
 		ll_aligned_free<ALIGNMENT>(ptr);
 	}
 
 	template<int CUSTOM_ALIGNMENT>
 	static void aligned_delete(void* ptr, size_t size)
 	{
+#if LL_TRACE_ENABLED
 		disclaim_alloc(sMemStat, size);
+#endif
 		ll_aligned_free<CUSTOM_ALIGNMENT>(ptr);
 	}
-
 	void* operator new [](size_t size)
 	{
+#if LL_TRACE_ENABLED
 		claim_alloc(sMemStat, size);
+#endif
 		return ll_aligned_malloc<ALIGNMENT>(size);
 	}
 
 	void operator delete[](void* ptr, size_t size)
 	{
+#if LL_TRACE_ENABLED
 		disclaim_alloc(sMemStat, size);
+#endif
 		ll_aligned_free<ALIGNMENT>(ptr);
 	}
@@ -412,31 +441,40 @@ public:
 	template<typename CLAIM_T>
 	void claimMem(const CLAIM_T& value) const
 	{
+#if LL_TRACE_ENABLED
 		S32 size = MeasureMem<CLAIM_T>::measureFootprint(value);
 		claim_alloc(sMemStat, size);
 		mMemFootprint += size;
+#endif
 	}
 
 	// remove memory we had claimed from our calculated footprint
 	template<typename CLAIM_T>
 	void disclaimMem(const CLAIM_T& value) const
 	{
+#if LL_TRACE_ENABLED
 		S32 size = MeasureMem<CLAIM_T>::measureFootprint(value);
 		disclaim_alloc(sMemStat, size);
 		mMemFootprint -= size;
+#endif
 	}
 
private:
+#if LL_TRACE_ENABLED
 	// use signed values so that we can temporarily go negative
 	// and reconcile in destructor
 	// NB: this assumes that no single class is responsible for > 2GB of allocations
 	mutable S32 mMemFootprint;
 
 	static	MemStatHandle	sMemStat;
+#endif
+
 };
 
+#if LL_TRACE_ENABLED
 template<typename DERIVED, size_t ALIGNMENT>
 MemStatHandle MemTrackableNonVirtual<DERIVED, ALIGNMENT>::sMemStat(typeid(MemTrackableNonVirtual<DERIVED, ALIGNMENT>).name());
+#endif
 
 template<typename DERIVED, size_t ALIGNMENT = LL_DEFAULT_HEAP_ALIGN>
 class MemTrackable : public MemTrackableNonVirtual<DERIVED, ALIGNMENT>
diff --git a/indra/llcommon/lltracerecording.cpp b/indra/llcommon/lltracerecording.cpp
index d6232d771d..0b10438b9f 100644
--- a/indra/llcommon/lltracerecording.cpp
+++ b/indra/llcommon/lltracerecording.cpp
@@ -100,6 +100,7 @@ Recording::~Recording()
 // brings recording to front of recorder stack, with up to date info
 void Recording::update()
 {
+#if LL_TRACE_ENABLED
 	if (isStarted())
 	{
 		mElapsedSeconds += mSamplingTimer.getElapsedTimeF64();
@@ -117,46 +118,57 @@ void Recording::update()
 
 		mSamplingTimer.reset();
 	}
+#endif
 }
 
 void Recording::handleReset()
 {
+#if LL_TRACE_ENABLED
 	mBuffers.write()->reset();
 
 	mElapsedSeconds = F64Seconds(0.0);
 	mSamplingTimer.reset();
+#endif
 }
 
 void Recording::handleStart()
 {
+#if LL_TRACE_ENABLED
 	mSamplingTimer.reset();
 	mBuffers.setStayUnique(true);
 	// must have thread recorder running on this thread
 	llassert(LLTrace::get_thread_recorder().notNull());
 	mActiveBuffers = LLTrace::get_thread_recorder()->activate(mBuffers.write());
+#endif
 }
 
 void Recording::handleStop()
 {
+#if LL_TRACE_ENABLED
 	mElapsedSeconds += mSamplingTimer.getElapsedTimeF64();
 	// must have thread recorder running on this thread
 	llassert(LLTrace::get_thread_recorder().notNull());
 	LLTrace::get_thread_recorder()->deactivate(mBuffers.write());
 	mActiveBuffers = NULL;
 	mBuffers.setStayUnique(false);
+#endif
 }
 
 void Recording::handleSplitTo(Recording& other)
 {
+#if LL_TRACE_ENABLED
 	mBuffers.write()->handOffTo(*other.mBuffers.write());
+#endif
 }
 
 void Recording::appendRecording( Recording& other )
 {
+#if LL_TRACE_ENABLED
 	update();
 	other.update();
 	mBuffers.write()->append(*other.mBuffers);
 	mElapsedSeconds += other.mElapsedSeconds;
+#endif
 }
 
 bool Recording::hasValue(const StatType<TimeBlockAccumulator>& stat)
diff --git a/indra/llcommon/lltracethreadrecorder.cpp b/indra/llcommon/lltracethreadrecorder.cpp
index 187d8546d3..181fc2f058 100644
--- a/indra/llcommon/lltracethreadrecorder.cpp
+++ b/indra/llcommon/lltracethreadrecorder.cpp
@@ -47,6 +47,7 @@ ThreadRecorder::ThreadRecorder()
 
 void ThreadRecorder::init()
 {
+#if LL_TRACE_ENABLED
 	LLThreadLocalSingletonPointer<BlockTimerStackRecord>::setInstance(&mBlockTimerStackRecord);
 	//NB: the ordering of initialization in this function is very fragile due to a large number of implicit dependencies
 	set_thread_recorder(this);
@@ -83,6 +84,7 @@ void ThreadRecorder::init()
 
 	claim_alloc(gTraceMemStat, this);
 	claim_alloc(gTraceMemStat, mRootTimer);
 	claim_alloc(gTraceMemStat, sizeof(TimeBlockTreeNode) * mNumTimeBlockTreeNodes);
+#endif
 }
@@ -96,6 +98,7 @@ ThreadRecorder::ThreadRecorder( ThreadRecorder& parent )
 
 ThreadRecorder::~ThreadRecorder()
 {
+#if LL_TRACE_ENABLED
 	LLThreadLocalSingletonPointer<BlockTimerStackRecord>::setInstance(NULL);
 
 	disclaim_alloc(gTraceMemStat, this);
@@ -119,19 +122,23 @@ ThreadRecorder::~ThreadRecorder()
 	{
 		mParentRecorder->removeChildRecorder(this);
 	}
+#endif
 }
 
 TimeBlockTreeNode* ThreadRecorder::getTimeBlockTreeNode( S32 index )
 {
+#if LL_TRACE_ENABLED
 	if (0 <= index && index < mNumTimeBlockTreeNodes)
 	{
 		return &mTimeBlockTreeNodes[index];
 	}
+#endif
 	return NULL;
 }
 
 AccumulatorBufferGroup* ThreadRecorder::activate( AccumulatorBufferGroup* recording)
 {
+#if LL_TRACE_ENABLED
 	ActiveRecording* active_recording = new ActiveRecording(recording);
 	if (!mActiveRecordings.empty())
 	{
@@ -144,10 +151,14 @@ AccumulatorBufferGroup* ThreadRecorder::activate( AccumulatorBufferGroup* record
 	mActiveRecordings.back()->mPartialRecording.makeCurrent();
 
 	return &active_recording->mPartialRecording;
+#else
+	return NULL;
+#endif
 }
 
 ThreadRecorder::active_recording_list_t::iterator ThreadRecorder::bringUpToDate( AccumulatorBufferGroup* recording )
 {
+#if LL_TRACE_ENABLED
 	if (mActiveRecordings.empty()) return mActiveRecordings.end();
 
 	mActiveRecordings.back()->mPartialRecording.sync();
@@ -186,10 +197,14 @@ ThreadRecorder::active_recording_list_t::iterator ThreadRecorder::bringUpToDate(
 	}
 
 	return (++it).base();
+#else
+	return ThreadRecorder::active_recording_list_t::iterator();
+#endif
 }
 
 void ThreadRecorder::deactivate( AccumulatorBufferGroup* recording )
 {
+#if LL_TRACE_ENABLED
 	active_recording_list_t::iterator recording_it = bringUpToDate(recording);
 	// this method should only be called on a thread where the recorder is active
 	llassert_always(recording_it != mActiveRecordings.end());
@@ -210,6 +225,7 @@ void ThreadRecorder::deactivate( AccumulatorBufferGroup* recording )
 		}
 	}
 	delete recording_to_remove;
+#endif
 }
 
 ThreadRecorder::ActiveRecording::ActiveRecording( AccumulatorBufferGroup* target )
@@ -218,35 +234,43 @@ ThreadRecorder::ActiveRecording::ActiveRecording( AccumulatorBufferGroup* target
 
 void ThreadRecorder::ActiveRecording::movePartialToTarget()
 {
+#if LL_TRACE_ENABLED
 	mTargetRecording->append(mPartialRecording);
 	// reset based on self to keep history
 	mPartialRecording.reset(&mPartialRecording);
+#endif
 }
 
 
 // called by child thread
 void ThreadRecorder::addChildRecorder( class ThreadRecorder* child )
 {
+#if LL_TRACE_ENABLED
 	{ LLMutexLock lock(&mChildListMutex);
 		mChildThreadRecorders.push_back(child);
 	}
+#endif
 }
 
 // called by child thread
 void ThreadRecorder::removeChildRecorder( class ThreadRecorder* child )
 {	
+#if LL_TRACE_ENABLED
 	{ LLMutexLock lock(&mChildListMutex);
 		mChildThreadRecorders.remove(child);
 	}
+#endif
 }
 
 void ThreadRecorder::pushToParent()
 {
+#if LL_TRACE_ENABLED
 	{ LLMutexLock lock(&mSharedRecordingMutex);	
 		LLTrace::get_thread_recorder()->bringUpToDate(&mThreadRecordingBuffers);
 		mSharedRecordingBuffers.append(mThreadRecordingBuffers);
 		mThreadRecordingBuffers.reset();
 	}
+#endif
 }
@@ -254,6 +278,7 @@ static LLTrace::BlockTimerStatHandle FTM_PULL_TRACE_DATA_FROM_CHILDREN("Pull chi
 
 void ThreadRecorder::pullFromChildren()
 {
+#if LL_TRACE_ENABLED
 	LL_RECORD_BLOCK_TIME(FTM_PULL_TRACE_DATA_FROM_CHILDREN);
 
 	if (mActiveRecordings.empty()) return;
@@ -270,6 +295,7 @@ void ThreadRecorder::pullFromChildren()
 			(*it)->mSharedRecordingBuffers.reset();
 		}
 	}
+#endif
 }
```
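One detail worth noting in lltracethreadrecorder.cpp: functions that return a value, such as `ThreadRecorder::activate()` and `bringUpToDate()`, gain an `#else` branch (`return NULL;` or a default-constructed iterator) so they still compile when tracing is disabled. Callers therefore see unchanged signatures but may receive empty results. Below is a hedged sketch of that shape; the free function and the caller shown here are hypothetical stand-ins, not code from this commit.

```cpp
#define LL_TRACE_ENABLED 0  // example build with tracing compiled out

struct AccumulatorBufferGroup {};

// Stand-in for ThreadRecorder::activate(): keeps its signature either way,
// but returns a null pointer when tracing is disabled, mirroring the #else
// branch added by this commit.
AccumulatorBufferGroup* activate(AccumulatorBufferGroup* recording)
{
#if LL_TRACE_ENABLED
	// ... push an ActiveRecording and make its buffers current ...
	return recording;
#else
	(void)recording;
	return 0;  // NULL in the viewer's C++03-era code
#endif
}

int main()
{
	AccumulatorBufferGroup buffers;
	AccumulatorBufferGroup* active = activate(&buffers);
	// hypothetical caller: tolerate a null result when tracing is disabled
	if (active)
	{
		// ... record into *active ...
	}
	return 0;
}
```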
