From a4c2eab969b1971389408de5a3927f87d74a6d6d Mon Sep 17 00:00:00 2001
From: Rye Mutt
Date: Thu, 28 Sep 2023 16:05:37 -0400
Subject: Fix integer overflow when framebuffers are extremely high resolution
 resulting in INT_MAX texture bias (#393)

---
 indra/newview/llviewertexture.cpp | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/indra/newview/llviewertexture.cpp b/indra/newview/llviewertexture.cpp
index 9fc092d4b9..9336d99555 100644
--- a/indra/newview/llviewertexture.cpp
+++ b/indra/newview/llviewertexture.cpp
@@ -534,9 +534,12 @@ void LLViewerTexture::updateClass()
 
     static LLCachedControl<U32> max_vram_budget(gSavedSettings, "RenderMaxVRAMBudget", 0);
 
+    F64 texture_bytes_alloc = LLImageGL::getTextureBytesAllocated() / 1024.0 / 512.0;
+    F64 vertex_bytes_alloc = LLVertexBuffer::getBytesAllocated() / 1024.0 / 512.0;
+
     // get an estimate of how much video memory we're using
     // NOTE: our metrics miss about half the vram we use, so this biases high but turns out to typically be within 5% of the real number
-    F32 used = (LLImageGL::getTextureBytesAllocated() + LLVertexBuffer::getBytesAllocated()) / 1024 / 512;
+    F32 used = (F32)ll_round(texture_bytes_alloc + vertex_bytes_alloc);
 
     F32 budget = max_vram_budget == 0 ? gGLManager.mVRAM : max_vram_budget;
-- 
cgit v1.2.3
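
Note: a minimal standalone sketch of the failure mode this patch closes, for context. This is not viewer code: S32/F32/F64 stand in for the viewer's typedefs, the byte counts are made-up values assuming 32-bit allocation counters (as the subject line's overflow implies), and std::round substitutes for the viewer's ll_round. It shows how summing two large byte counters in 32-bit integer math wraps before the divides ever run, and how promoting each term to double first avoids it:

    #include <cstdint>
    #include <cmath>
    #include <cstdio>

    typedef int32_t S32;  // stand-ins for the viewer's typedefs
    typedef float   F32;
    typedef double  F64;

    int main()
    {
        // Hypothetical allocation counts: each fits in a signed 32-bit int,
        // but their sum (2.5e9) exceeds INT_MAX (~2.147e9).
        S32 texture_bytes = 1500000000;
        S32 vertex_bytes  = 1000000000;

        // Old pattern, reenacted with unsigned math so the wraparound is
        // well-defined here: the 32-bit sum wraps to a negative value, so
        // 'used' becomes nonsense before the divides even happen.
        uint32_t wrapped = (uint32_t)texture_bytes + (uint32_t)vertex_bytes; // 2500000000
        S32 as_signed = (S32)wrapped;                     // -1794967296 on two's-complement targets
        F32 used_old = (F32)(as_signed / 1024 / 512);     // about -3423

        // Fixed pattern, as in the patch: promote each counter to F64 before
        // any arithmetic, so the sum never exists as a 32-bit integer.
        F64 texture_bytes_alloc = texture_bytes / 1024.0 / 512.0;
        F64 vertex_bytes_alloc  = vertex_bytes / 1024.0 / 512.0;
        F32 used_new = (F32)std::round(texture_bytes_alloc + vertex_bytes_alloc); // ~4768

        printf("old: %f  new: %f\n", used_old, used_new);
        return 0;
    }

A negative 'used' would then feed the VRAM-pressure logic downstream, which is consistent with the INT_MAX texture bias named in the subject line. The key design point of the fix is that each operand is converted to F64 before the addition, rather than casting the already-overflowed 32-bit sum afterward.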