author | Erik Kundiman <erik@megapahit.org> | 2024-01-06 15:26:51 +0800
committer | Erik Kundiman <erik@megapahit.org> | 2024-01-06 15:30:35 +0800
commit | 4d1b19e794c3d7b65e1fe6c0e41ced57ec7e3938 (patch)
tree | 15788c37014aae06f33b6530227f5d5a7c3c8538 /indra
parent | 2972db3c5097bfa435dc97886320c888e1e6fae9 (diff)
Revive VRAM detection on macOS
Copied code from llopenglview-objc.mm.
It would normally fail under SDL2, but somehow adding a logging call fixed it O.o
This is so texture compression is not enabled by default.
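
For context, the detection being revived is a single CGL renderer query. A minimal standalone sketch of the same idea follows; the file name vram_probe.cpp and the build line are assumptions of mine, while the API calls, the renderer-0 probe, and the 256 MB fallback mirror the patch further down. Building it needs the OpenGL and CoreGraphics frameworks and may emit deprecation warnings on recent macOS, where OpenGL is deprecated but still available.

// vram_probe.cpp -- sketch of the CGL VRAM query this commit revives (not viewer code).
// Assumed build line: clang++ vram_probe.cpp -o vram_probe -framework OpenGL -framework CoreGraphics
#include <OpenGL/OpenGL.h>
#include <CoreGraphics/CGDirectDisplay.h>
#include <cstdio>

int main()
{
    // Ask CGL which renderers can drive the main display.
    CGLRendererInfoObj info = 0;
    GLint num_renderers = 0;
    GLint vram_megabytes = 0;
    CGLError err = CGLQueryRendererInfo(CGDisplayIDToOpenGLDisplayMask(kCGDirectMainDisplay),
                                        &info, &num_renderers);
    if (err == kCGLNoError && num_renderers > 0)
    {
        // Like the patch, only renderer 0 is inspected here.
        CGLDescribeRenderer(info, 0, kCGLRPVideoMemoryMegabytes, &vram_megabytes);
        CGLDestroyRendererInfo(info);
    }
    else
    {
        vram_megabytes = 256;   // same conservative fallback as the patch
    }
    std::printf("Detected %dMB VRAM\n", vram_megabytes);
    return 0;
}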
Diffstat (limited to 'indra')
-rw-r--r-- | indra/llwindow/llwindowsdl.cpp | 16 |
1 file changed, 16 insertions(+), 0 deletions(-)
diff --git a/indra/llwindow/llwindowsdl.cpp b/indra/llwindow/llwindowsdl.cpp
index ecbbde20df..68ad988132 100644
--- a/indra/llwindow/llwindowsdl.cpp
+++ b/indra/llwindow/llwindowsdl.cpp
@@ -68,6 +68,9 @@ static bool ATIbug = false;
 
 #if LL_DARWIN
+#include <OpenGL/OpenGL.h>
+#include <CoreGraphics/CGDirectDisplay.h>
+
 BOOL gHiDPISupport = TRUE;
 
 namespace
@@ -671,6 +674,19 @@ BOOL LLWindowSDL::createContext(int x, int y, int width, int height, int bits, B
     {
         LL_INFOS() << "X11 log-parser detected " << gGLManager.mVRAM << "MB VRAM." << LL_ENDL;
     } else
+#elif LL_DARWIN
+    CGLRendererInfoObj info = 0;
+    GLint vram_megabytes = 0;
+    int num_renderers = 0;
+    auto err = CGLQueryRendererInfo(CGDisplayIDToOpenGLDisplayMask(kCGDirectMainDisplay),
+                                    &info, &num_renderers);
+    if (!err) {
+        CGLDescribeRenderer(info, 0, kCGLRPVideoMemoryMegabytes, &vram_megabytes);
+        CGLDestroyRendererInfo(info);
+    } else
+        vram_megabytes = 256;
+    gGLManager.mVRAM = vram_megabytes;
+    LL_INFOS() << "Detected " << gGLManager.mVRAM << "MB VRAM." << LL_ENDL;
 # endif // LL_X11
     {
         /*
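
One design note, not part of the commit: the patch describes only renderer index 0, which on a multi-GPU Mac is not guaranteed to be the renderer the viewer ends up using. A hedged variant that scans every reported renderer and keeps the largest figure from a hardware-accelerated one could look like the sketch below; the helper name detect_vram_megabytes_all_renderers and the kCGLRPAccelerated filter are my own choices, not something the patch does.

// Sketch only: pick the largest VRAM figure among hardware-accelerated renderers.
// Assumes the same headers as the patch: <OpenGL/OpenGL.h> and <CoreGraphics/CGDirectDisplay.h>.
static GLint detect_vram_megabytes_all_renderers()
{
    CGLRendererInfoObj info = 0;
    GLint num_renderers = 0;
    GLint best = 0;
    if (CGLQueryRendererInfo(CGDisplayIDToOpenGLDisplayMask(kCGDirectMainDisplay),
                             &info, &num_renderers) != kCGLNoError)
    {
        return 256; // fall back the same way the patch does
    }
    for (GLint i = 0; i < num_renderers; ++i)
    {
        GLint accelerated = 0;
        GLint vram = 0;
        CGLDescribeRenderer(info, i, kCGLRPAccelerated, &accelerated);
        CGLDescribeRenderer(info, i, kCGLRPVideoMemoryMegabytes, &vram);
        if (accelerated && vram > best)
        {
            best = vram;
        }
    }
    CGLDestroyRendererInfo(info);
    return best > 0 ? best : 256;
}

Either way, the resulting value would be assigned to gGLManager.mVRAM exactly as the patch does for its single-renderer query.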