author    Adam Moss <moss@lindenlab.com>  2009-04-16 23:45:35 +0000
committer Adam Moss <moss@lindenlab.com>  2009-04-16 23:45:35 +0000
commit    b01c75cb423f07a3d3354f8bd62f265f80062b3b (patch)
tree      dec1b220c24a60cc220d1cb07fd3545610644f0a /indra/llinventory/llinventory.cpp
parent    868250bdd74f348557102c0d8408d9bec30331f6 (diff)
svn merge -r117314:117337
svn+ssh://svn.lindenlab.com/svn/linden/branches/moss/mv13a-merge-1 QAR-1343 maint-viewer-13a+libcurlexploitfix-3-3 combo merge
Diffstat (limited to 'indra/llinventory/llinventory.cpp')
-rw-r--r--  indra/llinventory/llinventory.cpp | 20 ++++++++------------
1 file changed, 8 insertions(+), 12 deletions(-)
diff --git a/indra/llinventory/llinventory.cpp b/indra/llinventory/llinventory.cpp
index 4dddd9de3e..2823cf7be9 100644
--- a/indra/llinventory/llinventory.cpp
+++ b/indra/llinventory/llinventory.cpp
@@ -1257,23 +1257,19 @@ void LLInventoryItem::unpackBinaryBucket(U8* bin_bucket, S32 bin_bucket_size)
 	// Early exit on an empty binary bucket.
 	if (bin_bucket_size <= 1) return;
 
-	// Convert the bin_bucket into a string.
-	char* item_buffer = new char[bin_bucket_size+1];
-	if ((item_buffer != NULL) && (bin_bucket != NULL))
-	{
-		memcpy(item_buffer, bin_bucket, bin_bucket_size);	/* Flawfinder: ignore */
-	}
-	else
+	if (NULL == bin_bucket)
 	{
-		llerrs << "unpackBinaryBucket failed. item_buffer or bin_bucket is Null." << llendl;
-		delete[] item_buffer;
+		llerrs << "unpackBinaryBucket failed. bin_bucket is NULL." << llendl;
 		return;
 	}
+
+	// Convert the bin_bucket into a string.
+	std::vector<char> item_buffer(bin_bucket_size+1);
+	memcpy(&item_buffer[0], bin_bucket, bin_bucket_size);	/* Flawfinder: ignore */
 	item_buffer[bin_bucket_size] = '\0';
-	std::string str(item_buffer);
+	std::string str(&item_buffer[0]);
 
-	lldebugs << "item buffer: " << item_buffer << llendl;
-	delete[] item_buffer;
+	lldebugs << "item buffer: " << str << llendl;
 
 	// Tokenize the string.
 	typedef boost::tokenizer<boost::char_separator<char> > tokenizer;
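
The change above swaps a manually managed new[]/delete[] buffer for a std::vector<char>, so the temporary copy is released on every exit path without an explicit delete[]. A minimal standalone sketch of that pattern follows; unpack_bucket is a hypothetical stand-in for LLInventoryItem::unpackBinaryBucket, not code from this repository.

#include <cstring>
#include <string>
#include <vector>

// Copy a binary buffer that is not NUL-terminated into an owned,
// NUL-terminated buffer, then build a std::string from it. The vector
// frees its storage automatically when the function returns, including
// on the early error return.
std::string unpack_bucket(const unsigned char* bin_bucket, int bin_bucket_size)
{
    // Mirror the patch's guards: empty bucket or NULL input.
    if (bin_bucket == NULL || bin_bucket_size <= 1)
    {
        return std::string();
    }

    std::vector<char> item_buffer(bin_bucket_size + 1);
    std::memcpy(&item_buffer[0], bin_bucket, bin_bucket_size);
    item_buffer[bin_bucket_size] = '\0';

    return std::string(&item_buffer[0]);
}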