summaryrefslogtreecommitdiff
path: root/indra/llmath/llvolume.cpp
diff options
context:
space:
mode:
author	RunitaiLinden <davep@lindenlab.com>	2023-04-13 19:41:11 -0500
committer	RunitaiLinden <davep@lindenlab.com>	2023-04-13 19:41:11 -0500
commit	6c5b9076704865e96adb5b4a25a518633da1174e (patch)
tree	c75ca16f47bbf8bc5b2bff08256db127097d843a /indra/llmath/llvolume.cpp
parent	23365ca51aef2447efa07b0097a421ff864feec1 (diff)
SL-19358 Fix for explody meshes.
Diffstat (limited to 'indra/llmath/llvolume.cpp')
-rw-r--r--	indra/llmath/llvolume.cpp	51
1 file changed, 29 insertions(+), 22 deletions(-)
diff --git a/indra/llmath/llvolume.cpp b/indra/llmath/llvolume.cpp
index 7a694ab10c..2a906c8d41 100644
--- a/indra/llmath/llvolume.cpp
+++ b/indra/llmath/llvolume.cpp
@@ -5574,37 +5574,44 @@ bool LLVolumeFace::cacheOptimize(bool gen_tangents)
U32 vert_count = meshopt_generateVertexRemapMulti(&remap[0], nullptr, data.p.size(), data.p.size(), mos, stream_count);
- std::vector<U32> indices;
- indices.resize(mNumIndices);
+ if (vert_count < 65535)
+ {
+ std::vector<U32> indices;
+ indices.resize(mNumIndices);
- //copy results back into volume
- resizeVertices(vert_count);
+ //copy results back into volume
+ resizeVertices(vert_count);
- if (!data.w.empty())
- {
- allocateWeights(vert_count);
- }
+ if (!data.w.empty())
+ {
+ allocateWeights(vert_count);
+ }
- allocateTangents(mNumVertices);
+ allocateTangents(mNumVertices);
- for (int i = 0; i < mNumIndices; ++i)
- {
- U32 src_idx = i;
- U32 dst_idx = remap[i];
- mIndices[i] = dst_idx;
+ for (int i = 0; i < mNumIndices; ++i)
+ {
+ U32 src_idx = i;
+ U32 dst_idx = remap[i];
+ mIndices[i] = dst_idx;
- mPositions[dst_idx].load3(data.p[src_idx].mV);
- mNormals[dst_idx].load3(data.n[src_idx].mV);
- mTexCoords[dst_idx] = data.tc[src_idx];
+ mPositions[dst_idx].load3(data.p[src_idx].mV);
+ mNormals[dst_idx].load3(data.n[src_idx].mV);
+ mTexCoords[dst_idx] = data.tc[src_idx];
- mTangents[dst_idx].loadua(data.t[src_idx].mV);
+ mTangents[dst_idx].loadua(data.t[src_idx].mV);
- if (mWeights)
- {
- mWeights[dst_idx].loadua(data.w[src_idx].mV);
+ if (mWeights)
+ {
+ mWeights[dst_idx].loadua(data.w[src_idx].mV);
+ }
}
}
-
+ else
+ {
+ // blew past the max vertex size limit, use legacy tangent generation which never adds verts
+ createTangents();
+ }
// put back in normalized coordinate frame
LLVector4a inv_scale(1.f/mNormalizedScale.mV[0], 1.f / mNormalizedScale.mV[1], 1.f / mNormalizedScale.mV[2]);