Diffstat (limited to 'indra/llrender/llgl.cpp')
-rw-r--r--	indra/llrender/llgl.cpp	20
1 file changed, 13 insertions, 7 deletions
diff --git a/indra/llrender/llgl.cpp b/indra/llrender/llgl.cpp
index a60f1efbdf..798b605f08 100644
--- a/indra/llrender/llgl.cpp
+++ b/indra/llrender/llgl.cpp
@@ -995,9 +995,6 @@ LLGLManager::LLGLManager() :
     mIsAMD(false),
     mIsNVIDIA(false),
     mIsIntel(false),
-#if LL_DARWIN
-    mIsMobileGF(false),
-#endif
     mHasRequirements(true),
     mDriverVersionMajor(1),
     mDriverVersionMinor(0),
@@ -1039,7 +1036,6 @@ void LLGLManager::initWGL()
         GLH_EXT_NAME(wglGetGPUIDsAMD) = (PFNWGLGETGPUIDSAMDPROC)GLH_EXT_GET_PROC_ADDRESS("wglGetGPUIDsAMD");
         GLH_EXT_NAME(wglGetGPUInfoAMD) = (PFNWGLGETGPUINFOAMDPROC)GLH_EXT_GET_PROC_ADDRESS("wglGetGPUInfoAMD");
     }
-    mHasNVXGpuMemoryInfo = ExtensionExists("GL_NVX_gpu_memory_info", gGLHExts.mSysExts);

     if (ExtensionExists("WGL_EXT_swap_control", gGLHExts.mSysExts))
     {
@@ -1145,7 +1141,11 @@ bool LLGLManager::initGL()
     // Trailing space necessary to keep "nVidia Corpor_ati_on" cards
     // from being recognized as ATI.
     // NOTE: AMD has been pretty good about not breaking this check, do not rename without good reason
-    if (mGLVendor.substr(0,4) == "ATI ")
+    if (mGLVendor.substr(0,4) == "ATI "
+#if LL_LINUX
+        || mGLVendor.find("AMD") != std::string::npos
+#endif //LL_LINUX
+        )
     {
         mGLVendorShort = "AMD";
         // *TODO: Fix this?
@@ -1211,8 +1211,10 @@ bool LLGLManager::initGL()
         {
             LL_WARNS("RenderInit") << "VRAM Detected (AMDAssociations):" << mVRAM << LL_ENDL;
         }
-    }
-    else if (mHasNVXGpuMemoryInfo)
+    } else
+#endif
+#if LL_WINDOWS || LL_LINUX
+    if (mHasNVXGpuMemoryInfo)
     {
         GLint mem_kb = 0;
         glGetIntegerv(GL_GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX, &mem_kb);
@@ -1430,6 +1432,10 @@ void LLGLManager::initExtensions()
     mHasTransformFeedback = mGLVersion >= 3.99f;
     mHasDebugOutput = mGLVersion >= 4.29f;

+#if LL_WINDOWS || LL_LINUX
+    mHasNVXGpuMemoryInfo = ExtensionExists("GL_NVX_gpu_memory_info", gGLHExts.mSysExts);
+#endif
+
     // Misc
     glGetIntegerv(GL_MAX_ELEMENTS_VERTICES, (GLint*) &mGLMaxVertexRange);
     glGetIntegerv(GL_MAX_ELEMENTS_INDICES, (GLint*) &mGLMaxIndexRange);
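
The functional core of this change is that the GL_NVX_gpu_memory_info probe moves into initExtensions() and is compiled only on Windows and Linux, while the Linux vendor check now also matches "AMD" in the vendor string. For reference, below is a minimal standalone sketch of the NVX memory query pattern, assuming a live compatibility-profile OpenGL context; has_extension() and query_dedicated_vram_mb() are hypothetical helpers standing in for LLGLManager's ExtensionExists() and the initGL() logic, not viewer code.

#include <GL/gl.h>
#include <cstring>

// Token from the GL_NVX_gpu_memory_info extension spec.
#ifndef GL_GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX
#define GL_GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX 0x9047
#endif

// Hypothetical helper: naive substring test against the extension string
// (the viewer's ExtensionExists() does a proper token-by-token match).
static bool has_extension(const char* name, const char* ext_string)
{
    return ext_string && std::strstr(ext_string, name) != nullptr;
}

// Returns dedicated VRAM in megabytes, or 0 if the extension is absent.
static int query_dedicated_vram_mb()
{
    const char* exts = reinterpret_cast<const char*>(glGetString(GL_EXTENSIONS));
    if (!has_extension("GL_NVX_gpu_memory_info", exts))
        return 0;

    GLint mem_kb = 0;
    glGetIntegerv(GL_GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX, &mem_kb);
    return mem_kb / 1024; // the extension reports kilobytes
}

Note that glGetString(GL_EXTENSIONS) is only valid in compatibility contexts; core-profile code would enumerate extensions with glGetStringi(GL_EXTENSIONS, i) instead.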