Diffstat (limited to 'indra/llrender')
-rw-r--r--   indra/llrender/llrender.cpp      5
-rw-r--r--   indra/llrender/llrender.h        1
-rw-r--r--   indra/llrender/llshadermgr.cpp  54
3 files changed, 40 insertions, 20 deletions
diff --git a/indra/llrender/llrender.cpp b/indra/llrender/llrender.cpp
index 812fa7024b..cd827f5091 100644
--- a/indra/llrender/llrender.cpp
+++ b/indra/llrender/llrender.cpp
@@ -1444,6 +1444,11 @@ const glh::matrix4f& LLRender::getModelviewMatrix()
 	return mMatrix[MM_MODELVIEW][mMatIdx[MM_MODELVIEW]];
 }
 
+const glh::matrix4f& LLRender::getProjectionMatrix()
+{
+	return mMatrix[MM_PROJECTION][mMatIdx[MM_PROJECTION]];
+}
+
 void LLRender::translateUI(F32 x, F32 y, F32 z)
 {
 	if (mUIOffset.empty())
diff --git a/indra/llrender/llrender.h b/indra/llrender/llrender.h
index 7581b9f908..fa5f7f311d 100644
--- a/indra/llrender/llrender.h
+++ b/indra/llrender/llrender.h
@@ -348,6 +348,7 @@ public:
 	void matrixMode(U32 mode);
 
 	const glh::matrix4f& getModelviewMatrix();
+	const glh::matrix4f& getProjectionMatrix();
 
 	void syncMatrices();
 	void syncLightState();
diff --git a/indra/llrender/llshadermgr.cpp b/indra/llrender/llshadermgr.cpp
index eea768a3ea..ac9dc9544d 100644
--- a/indra/llrender/llshadermgr.cpp
+++ b/indra/llrender/llshadermgr.cpp
@@ -845,28 +845,42 @@ BOOL LLShaderMgr::linkProgramObject(GLhandleARB obj, BOOL suppress_errors)
 		LL_WARNS("ShaderLoading") << "GLSL Linker Error:" << LL_ENDL;
 	}
 
-// NOTE: Removing LL_DARWIN block as it doesn't seem to actually give the correct answer,
-// but want it for reference once I move it.
-#if 0
-	// Force an evaluation of the gl state so the driver can tell if the shader will run in hardware or software
-	// per Apple's suggestion
-	glBegin(gGL.mMode);
-	glEnd();
-
-	// Query whether the shader can or cannot run in hardware
-	// http://developer.apple.com/qa/qa2007/qa1502.html
-	long vertexGPUProcessing;
-	CGLContextObj ctx = CGLGetCurrentContext();
-	CGLGetParameter (ctx, kCGLCPGPUVertexProcessing, &vertexGPUProcessing);
-	long fragmentGPUProcessing;
-	CGLGetParameter (ctx, kCGLCPGPUFragmentProcessing, &fragmentGPUProcessing);
-	if (!fragmentGPUProcessing || !vertexGPUProcessing)
+#if LL_DARWIN
+
+	// For some reason this absolutely kills the frame rate when VBO's are enabled
+	if (0)
 	{
-		LL_WARNS("ShaderLoading") << "GLSL Linker: Running in Software:" << LL_ENDL;
-		success = GL_FALSE;
-		suppress_errors = FALSE;
+		// Force an evaluation of the gl state so the driver can tell if the shader will run in hardware or software
+		// per Apple's suggestion
+		LLGLSLShader::sNoFixedFunction = false;
+
+		glUseProgramObjectARB(obj);
+
+		gGL.begin(LLRender::TRIANGLES);
+		gGL.vertex3f(0.0f, 0.0f, 0.0f);
+		gGL.vertex3f(0.0f, 0.0f, 0.0f);
+		gGL.vertex3f(0.0f, 0.0f, 0.0f);
+		gGL.end();
+		gGL.flush();
+
+		glUseProgramObjectARB(0);
+
+		LLGLSLShader::sNoFixedFunction = true;
+
+		// Query whether the shader can or cannot run in hardware
+		// http://developer.apple.com/qa/qa2007/qa1502.html
+		GLint vertexGPUProcessing, fragmentGPUProcessing;
+		CGLContextObj ctx = CGLGetCurrentContext();
+		CGLGetParameter(ctx, kCGLCPGPUVertexProcessing, &vertexGPUProcessing);
+		CGLGetParameter(ctx, kCGLCPGPUFragmentProcessing, &fragmentGPUProcessing);
+		if (!fragmentGPUProcessing || !vertexGPUProcessing)
+		{
+			LL_WARNS("ShaderLoading") << "GLSL Linker: Running in Software:" << LL_ENDL;
+			success = GL_FALSE;
+			suppress_errors = FALSE;
+		}
 	}
-	
+
 #else
 	std::string log = get_object_log(obj);
 	LLStringUtil::toLower(log);
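
Note on the re-enabled LL_DARWIN block: it follows Apple's Technical Q&A QA1502, which suggests forcing the driver to evaluate the current GL state (here via a degenerate triangle drawn with the freshly linked program bound) and then asking the CGL context whether vertex and fragment processing stayed on the GPU. Below is a minimal standalone sketch of that query, assuming a CGL context is already current and the program of interest is bound; the helper name shader_runs_in_hardware is illustrative and not part of this changeset.

// Minimal sketch of the Apple QA1502 hardware/software query (macOS/CGL only).
// Assumes a CGL context is already current; the helper name is hypothetical.
#include <OpenGL/OpenGL.h>   // CGLGetCurrentContext, CGLGetParameter, kCGLCP* tokens
#include <OpenGL/gl.h>       // GLint

static bool shader_runs_in_hardware()
{
	CGLContextObj ctx = CGLGetCurrentContext();
	if (!ctx)
	{
		return false;   // no current context, nothing to query
	}

	// Both parameters are read-only context properties; a zero value means that
	// pipeline stage has fallen back to software processing.
	GLint vertexGPU = 0;
	GLint fragmentGPU = 0;
	CGLGetParameter(ctx, kCGLCPGPUVertexProcessing, &vertexGPU);
	CGLGetParameter(ctx, kCGLCPGPUFragmentProcessing, &fragmentGPU);

	return vertexGPU != 0 && fragmentGPU != 0;
}

In the patch itself the whole check is wrapped in if (0): as its comment notes, forcing the state evaluation severely hurts the frame rate when VBOs are enabled, so the code is kept in the tree under #if LL_DARWIN but left disabled.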