Diffstat (limited to 'indra/llui/llkeywords.cpp')
-rw-r--r--  indra/llui/llkeywords.cpp  558
1 file changed, 407 insertions(+), 151 deletions(-)
diff --git a/indra/llui/llkeywords.cpp b/indra/llui/llkeywords.cpp
index c1cd04186b..2ff0298ba6 100644
--- a/indra/llui/llkeywords.cpp
+++ b/indra/llui/llkeywords.cpp
@@ -1,25 +1,25 @@
-/**
+/**
* @file llkeywords.cpp
* @brief Keyword list for LSL
*
* $LicenseInfo:firstyear=2000&license=viewerlgpl$
* Second Life Viewer Source Code
* Copyright (C) 2010, Linden Research, Inc.
- *
+ *
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation;
* version 2.1 of the License only.
- *
+ *
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
- *
+ *
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- *
+ *
* Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
* $/LicenseInfo$
*/
@@ -29,12 +29,12 @@
#include <iostream>
#include <fstream>
+#include "lldir.h"
#include "llkeywords.h"
+#include "llsdserialize.h"
#include "lltexteditor.h"
#include "llstl.h"
-#include <boost/tokenizer.hpp>
-const U32 KEYWORD_FILE_CURRENT_VERSION = 2;
inline BOOL LLKeywordToken::isHead(const llwchar* s) const
{
@@ -53,10 +53,6 @@ inline BOOL LLKeywordToken::isHead(const llwchar* s) const
return res;
}
-LLKeywords::LLKeywords() : mLoaded(FALSE)
-{
-}
-
inline BOOL LLKeywordToken::isTail(const llwchar* s) const
{
BOOL res = TRUE;
@@ -73,6 +69,12 @@ inline BOOL LLKeywordToken::isTail(const llwchar* s) const
return res;
}
+LLKeywords::LLKeywords() : mLoaded(FALSE)
+{
+ setFilenameColors( gDirUtilp->getExpandedFilename(LL_PATH_APP_SETTINGS,"keywords_lsl_colors.xml") );
+ setFilenameSyntax( gDirUtilp->getExpandedFilename(LL_PATH_APP_SETTINGS,"keywords_lsl_tokens.xml") );
+}
+
LLKeywords::~LLKeywords()
{
std::for_each(mWordTokenMap.begin(), mWordTokenMap.end(), DeletePairedPointer());
@@ -80,180 +82,418 @@ LLKeywords::~LLKeywords()
std::for_each(mDelimiterTokenList.begin(), mDelimiterTokenList.end(), DeletePointer());
}
-BOOL LLKeywords::loadFromFile( const std::string& filename )
+
+
+void LLKeywords::addColorGroup(const std::string key_in, const LLColor3 color)
{
- mLoaded = FALSE;
+ WStringMapIndex key ( utf8str_to_wstring(key_in) );
+ mColorGroupMap[key] = color;
+}
- ////////////////////////////////////////////////////////////
- // File header
+// Add the token as described
+void LLKeywords::addToken(LLKeywordToken::TOKEN_TYPE type,
+ const std::string& key_in,
+ const LLColor3& color,
+ const std::string& tool_tip_in,
+ const std::string& delimiter_in)
+{
+ std::string tip_text = tool_tip_in;
+ LLStringUtil::replaceString(tip_text, "\\n", "\n" );
+ LLStringUtil::replaceString(tip_text, "\t", " " );
+ if (tip_text =="")
+ {
+ tip_text = "[no info]";
+ }
+ LLWString tool_tip = utf8str_to_wstring(tip_text);
- const S32 BUFFER_SIZE = 1024;
- char buffer[BUFFER_SIZE]; /* Flawfinder: ignore */
+ LLWString key = utf8str_to_wstring(key_in);
+ LLWString delimiter = utf8str_to_wstring(delimiter_in);
+ switch(type)
+ {
+ case LLKeywordToken::TT_CONSTANT:
+ case LLKeywordToken::TT_EVENT:
+ case LLKeywordToken::TT_FLOW:
+ case LLKeywordToken::TT_FUNCTION:
+ case LLKeywordToken::TT_LABEL:
+ case LLKeywordToken::TT_SECTION:
+ case LLKeywordToken::TT_TYPE:
+ case LLKeywordToken::TT_WORD:
+ mWordTokenMap[key] = new LLKeywordToken(type, color, key, tool_tip, LLWStringUtil::null);
+ break;
- llifstream file;
- file.open(filename); /* Flawfinder: ignore */
- if( file.fail() )
+ case LLKeywordToken::TT_LINE:
+ mLineTokenList.push_front(new LLKeywordToken(type, color, key, tool_tip, LLWStringUtil::null));
+ break;
+
+ case LLKeywordToken::TT_TWO_SIDED_DELIMITER:
+ case LLKeywordToken::TT_DOUBLE_QUOTATION_MARKS:
+ case LLKeywordToken::TT_ONE_SIDED_DELIMITER:
+ mDelimiterTokenList.push_front(new LLKeywordToken(type, color, key, tool_tip, delimiter));
+ break;
+
+ default:
+ llassert(0);
+ }
+}
+
+std::string LLKeywords::getArguments(LLSD& arguments)
+{
+ std::string args = "";
+ if (arguments.isArray())
{
- llinfos << "LLKeywords::loadFromFile() Unable to open file: " << filename << llendl;
- return mLoaded;
+ int count = 0;
+ do
+ {
+ LLSD arg = arguments[count];
+ args += arg.get("type").asString() + " " + arg.get("name").asString();
+ ++count;
+ if (arguments.size() - count > 0)
+ {
+ args += ", ";
+ }
+ } while (count < arguments.size());
}
+ else
+ {
+ LL_WARNS("Arguments") << "Not an array! Invalid LLSD passed to function.\n" << arguments << LL_ENDL;
+ }
+ return args == "" ? " void " : args;
+}
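For illustration only, a hypothetical 'arguments' array of the shape the helper above expects, and the string it builds from it:

    // Hypothetical input:
    //   arguments = [ { "type" : "integer", "name" : "channel" },
    //                 { "type" : "string",  "name" : "message" } ]
    //
    // getArguments(arguments) returns "integer channel, string message".
    // A non-array value logs a warning and the fallback " void " is returned.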
- // Identifying string
- file >> buffer;
- if( strcmp( buffer, "llkeywords" ) )
+std::string LLKeywords::getAttribute(const std::string& key)
+{
+ attribute_iterator_t it = mAttributes.find(key);
+ return (it != mAttributes.end()) ? it->second : "";
+}
+
+LLColor3 LLKeywords::getColorGroup(const std::string key_in)
+{
+ // LLColor3 initialises to Black (0,0,0)
+ LLColor3 Colour;
+ WStringMapIndex key ( utf8str_to_wstring(key_in) );
+ group_color_map_t::iterator it = mColorGroupMap.find(key);
+ if (it == mColorGroupMap.end())
+ {
+ LL_WARNS("Colour lookup") << "'" << key_in << "' not found!" << LL_ENDL;
+ }
+ else
{
- llinfos << filename << " does not appear to be a keyword file" << llendl;
- return mLoaded;
+ Colour = it->second;
}
- // Check file version
- file >> buffer;
- U32 version_num;
- file >> version_num;
- if( strcmp(buffer, "version") || version_num != (U32)KEYWORD_FILE_CURRENT_VERSION )
+ return Colour;
+}
+
+BOOL LLKeywords::initialise()
+{
+ mReady = false;
+
+ if (! loadIntoLLSD(mFilenameColors, mColors) )
{
- llinfos << filename << " does not appear to be a version " << KEYWORD_FILE_CURRENT_VERSION << " keyword file" << llendl;
- return mLoaded;
+ LL_ERRS("") << "Failed to load color data, cannot continue!" << LL_ENDL;
+ }
+ else if (! loadIntoLLSD(mFilenameSyntax, mSyntax) )
+ {
+ LL_ERRS("") << "Failed to load syntax data from '" << mFilenameSyntax << "', cannot continue!" << LL_ENDL;
+ }
+ else
+ {
+ mReady = true;
}
- // start of line (SOL)
- std::string SOL_COMMENT("#");
- std::string SOL_WORD("[word ");
- std::string SOL_LINE("[line ");
- std::string SOL_ONE_SIDED_DELIMITER("[one_sided_delimiter ");
- std::string SOL_TWO_SIDED_DELIMITER("[two_sided_delimiter ");
- std::string SOL_DOUBLE_QUOTATION_MARKS("[double_quotation_marks ");
+ if (ready())
+ {
+ processColors();
+ }
+ else
+ {
+ LL_ERRS("") << LL_ENDL;
+ LL_ERRS("") << "Failed to load one or both data files, cannot continue!" << LL_ENDL;
+ }
+ return mReady;
+}
- LLColor3 cur_color( 1, 0, 0 );
- LLKeywordToken::TOKEN_TYPE cur_type = LLKeywordToken::WORD;
+BOOL LLKeywords::loadFromFile()
+{
+ processTokens();
+ return true;
+}
- while (!file.eof())
+/**
+ * @brief Load an XML-serialised LLSD file
+ * @desc Opens the specified file and attempts to deserialise its
+ * contents into the supplied LLSD object.
+ * @return TRUE if the file was opened and deserialised successfully, otherwise FALSE.
+ */
+BOOL LLKeywords::loadIntoLLSD(const std::string& filename, LLSD& data)
+{
+ mLoaded = false;
+ llifstream file;
+ file.open(filename);
+ if(file.is_open())
{
- buffer[0] = 0;
- file.getline( buffer, BUFFER_SIZE );
- std::string line(buffer);
- if( line.find(SOL_COMMENT) == 0 )
- {
- continue;
- }
- else if( line.find(SOL_WORD) == 0 )
+ mLoaded = (BOOL)LLSDSerialize::fromXML(data, file);
+ if (!mLoaded)
{
- cur_color = readColor( line.substr(SOL_WORD.size()) );
- cur_type = LLKeywordToken::WORD;
- continue;
+ LL_WARNS("") << "Unable to deserialise file: " << filename << LL_ENDL;
}
- else if( line.find(SOL_LINE) == 0 )
+ else
{
- cur_color = readColor( line.substr(SOL_LINE.size()) );
- cur_type = LLKeywordToken::LINE;
- continue;
- }
- else if( line.find(SOL_TWO_SIDED_DELIMITER) == 0 )
- {
- cur_color = readColor( line.substr(SOL_TWO_SIDED_DELIMITER.size()) );
- cur_type = LLKeywordToken::TWO_SIDED_DELIMITER;
- continue;
- }
- else if( line.find(SOL_DOUBLE_QUOTATION_MARKS) == 0 )
- {
- cur_color = readColor( line.substr(SOL_DOUBLE_QUOTATION_MARKS.size()) );
- cur_type = LLKeywordToken::DOUBLE_QUOTATION_MARKS;
- continue;
- }
- else if( line.find(SOL_ONE_SIDED_DELIMITER) == 0 )
- {
- cur_color = readColor( line.substr(SOL_ONE_SIDED_DELIMITER.size()) );
- cur_type = LLKeywordToken::ONE_SIDED_DELIMITER;
- continue;
+ LL_INFOS("") << "Deserialised file: " << filename << LL_ENDL;
}
+ }
+ else
+ {
+ LL_WARNS("") << "Unable to open file: " << filename << LL_ENDL;
+ }
+ return mLoaded;
+}
+
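A minimal usage sketch (illustration only, not part of the patch), assuming loadIntoLLSD() is called from inside LLKeywords with the same LL_PATH_APP_SETTINGS filename the constructor sets up:

    LLSD syntax;
    std::string path = gDirUtilp->getExpandedFilename(LL_PATH_APP_SETTINGS,
                                                      "keywords_lsl_tokens.xml");
    if (loadIntoLLSD(path, syntax))
    {
        // 'syntax' now holds the deserialised LLSD data
    }
    else
    {
        // the open or the parse failed; a warning has been logged
    }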
+/**
+ * @brief Start processing the colour LLSD from its beginning.
+ *
+ */
+std::string LLKeywords::processColors()
+{
+ return processColors(mColors, "");
+}
- std::string token_buffer( line );
- LLStringUtil::trim(token_buffer);
-
- typedef boost::tokenizer<boost::char_separator<char> > tokenizer;
- boost::char_separator<char> sep_word("", " \t");
- tokenizer word_tokens(token_buffer, sep_word);
- tokenizer::iterator token_word_iter = word_tokens.begin();
+/**
+ * @brief Recursively process the colour LLSD from an arbitrary level.
+ * @desc Process the supplied LLSD for colour data. strPrefix is the chain of
+ * hyphen-separated keys accumulated from the enclosing levels.
+ */
+std::string LLKeywords::processColors(LLSD &settings, const std::string strPrefix)
+{
+ if (settings.isMap() || (! settings.isMap() && strPrefix != "") )
+ {
+ LLSD llsd_map = settings;
- if( !token_buffer.empty() && token_word_iter != word_tokens.end() )
+ LLSD::map_iterator my_iter = llsd_map.beginMap();
+ for ( ; my_iter != llsd_map.endMap(); ++my_iter)
{
- // first word is the keyword or a left delimiter
- std::string keyword = (*token_word_iter);
- LLStringUtil::trim(keyword);
+ std::string strGroup = strPrefix;
+ const LLSD::String& key = my_iter->first;
+ LLSD& value = my_iter->second;
- // second word may be a right delimiter
- std::string delimiter;
- if (cur_type == LLKeywordToken::TWO_SIDED_DELIMITER)
+ if (key == "color")
{
- while (delimiter.length() == 0 && ++token_word_iter != word_tokens.end())
+ if (value.isMap() || value.isArray())
+ {
+ addColorGroup(strGroup, readColor(value));
+ }
+ else
{
- delimiter = *token_word_iter;
- LLStringUtil::trim(delimiter);
+ LL_WARNS("Invalid Color") << "Invalid Color Entry - first: '" << key << "' second: '" << value << "'" << LL_ENDL;
}
}
- else if (cur_type == LLKeywordToken::DOUBLE_QUOTATION_MARKS)
+ else if (value.isMap())
+ {
+ strGroup += (strGroup.length() == 0) ? my_iter->first : "-" + my_iter->first;
+ strGroup = processColors(value, strGroup);
+ }
+ else
{
- // Closing delimiter is identical to the opening one.
- delimiter = keyword;
+ LL_WARNS("Invalid Color") << "Invalid Color Entry - first: '" << key << "' second: '" << value << "'" << LL_ENDL;
}
+ }
+ }
+ return strPrefix;
+}
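For illustration, a sketch of how the recursion above composes group names, assuming colour data shaped like keywords_lsl_colors.xml; the keys and values here are hypothetical:

    // Hypothetical data built directly as LLSD instead of being loaded from XML:
    //   { "misc" : { "flow" : { "label" : { "color" : [ 0.8, 0.0, 0.8 ] } } } }
    LLSD color;
    color.append(0.8);
    color.append(0.0);
    color.append(0.8);
    LLSD colors;
    colors["misc"]["flow"]["label"]["color"] = color;

    // processColors(colors, "") descends "misc" -> "flow" -> "label", joining
    // the keys with '-' as it goes; on reaching the "color" entry it calls
    // addColorGroup("misc-flow-label", readColor(...)), the same group key
    // that processTokens() later looks up via getColorGroup("misc-flow-label").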
- // following words are tooltip
- std::string tool_tip;
- while (++token_word_iter != word_tokens.end())
+void LLKeywords::processTokens()
+{
+ // Add 'standard' stuff: Quotes, Comments, Strings, Labels, etc. before processing the LLSD
+ std::string delimiter;
+ addToken(LLKeywordToken::TT_LINE, "@", getColorGroup("misc-flow-label"), "Label\nTarget for jump statement", delimiter );
+ addToken(LLKeywordToken::TT_ONE_SIDED_DELIMITER, "//", getColorGroup("misc-comments_1_sided"), "Comment\nNon-functional commentary or disabled code", delimiter );
+ addToken(LLKeywordToken::TT_TWO_SIDED_DELIMITER, "/*", getColorGroup("misc-comments_2_sided"), "Comment\nNon-functional commentary or disabled code (multi-line)", "*/" );
+ addToken(LLKeywordToken::TT_DOUBLE_QUOTATION_MARKS, "\"", getColorGroup("misc-double_quotation_marks"), "String literal", "\"" );
+
+ LLSD::map_iterator outerIt = mSyntax.beginMap();
+ for ( ; outerIt != mSyntax.endMap(); ++outerIt)
+ {
+ // TODO: Collapse these 'if's into two branches: groups whose map is passed straight to processTokensGroup(), and an 'else if' for 'misc', which iterates its sub-groups first
+ if (outerIt->first == "constants")
+ {
+ if (outerIt->second.isMap())
+ {
+ processTokensGroup(outerIt->second, "constants");
+ }
+ else
+ {
+ LL_ERRS("Tokens-Constants") << "No constants map to process!" << LL_ENDL;
+ }
+ }
+ else if(outerIt->first == "misc")
+ {
+ if (outerIt->second.isMap())
{
- tool_tip += (*token_word_iter);
+ LLSD::map_iterator innerIt = outerIt->second.beginMap();
+ for ( ; innerIt != outerIt->second.endMap(); ++innerIt)
+ {
+ processTokensGroup(innerIt->second, "misc-" + innerIt->first);
+ }
}
- LLStringUtil::trim(tool_tip);
-
- if( !tool_tip.empty() )
+ else
{
- // Replace : with \n for multi-line tool tips.
- LLStringUtil::replaceChar( tool_tip, ':', '\n' );
- addToken(cur_type, keyword, cur_color, tool_tip, delimiter );
+ LL_ERRS("Tokens-Misc") << "No misc map to process!" << LL_ENDL;
+ }
+ }
+ else if(outerIt->first == "events")
+ {
+ if (outerIt->second.isMap())
+ {
+ processTokensGroup(outerIt->second, "events");
}
else
{
- addToken(cur_type, keyword, cur_color, LLStringUtil::null, delimiter );
+ LL_ERRS("Tokens-Events") << "No event map to process!" << LL_ENDL;
}
}
+ else if(outerIt->first == "functions")
+ {
+ if (outerIt->second.isMap())
+ {
+ processTokensGroup(outerIt->second, "functions");
+ }
+ else
+ {
+ LL_ERRS("Tokens-Functions") << "No function map to process!" << LL_ENDL;
+ }
+ }
+ else if(outerIt->first == "types")
+ {
+ if (outerIt->second.isArray())
+ {
+ processTokensGroup(outerIt->second, "types");
+ }
+ else
+ {
+ LL_ERRS("Tokens-Types") << "No types array to process!" << LL_ENDL;
+ }
+ }
+ else
+ {
+ LL_ERRS("Tokens") << "Unknown token group '" << outerIt->first << "'" << LL_ENDL;
+ }
}
-
- file.close();
-
- mLoaded = TRUE;
- return mLoaded;
+ LL_INFOS("") << LL_ENDL;
}
-// Add the token as described
-void LLKeywords::addToken(LLKeywordToken::TOKEN_TYPE type,
- const std::string& key_in,
- const LLColor3& color,
- const std::string& tool_tip_in,
- const std::string& delimiter_in)
+void LLKeywords::processTokensGroup(LLSD& Tokens, const std::string Group)
{
- LLWString key = utf8str_to_wstring(key_in);
- LLWString tool_tip = utf8str_to_wstring(tool_tip_in);
- LLWString delimiter = utf8str_to_wstring(delimiter_in);
- switch(type)
+ LLColor3 Color = getColorGroup(Group);
+ LL_INFOS("Tokens") << "Group: '" << Group << "', using colour: '" << Color << "'" << LL_ENDL;
+
+ LLKeywordToken::TOKEN_TYPE token_type = LLKeywordToken::TT_UNKNOWN;
+ // If a new token type is added here, it must also be added to the 'addToken' method
+ if (Group == "constants")
{
- case LLKeywordToken::WORD:
- mWordTokenMap[key] = new LLKeywordToken(type, color, key, tool_tip, LLWStringUtil::null);
- break;
+ token_type = LLKeywordToken::TT_CONSTANT;
+ }
+ else if (Group == "events")
+ {
+ token_type = LLKeywordToken::TT_EVENT;
+ }
+ else if (Group == "misc-flow-control")
+ {
+ token_type = LLKeywordToken::TT_FLOW;
+ }
+ else if (Group == "functions")
+ {
+ token_type = LLKeywordToken::TT_FUNCTION;
+ }
+ else if (Group == "misc-flow-label")
+ {
+ token_type = LLKeywordToken::TT_LABEL;
+ }
+ else if (Group == "misc-sections")
+ {
+ token_type = LLKeywordToken::TT_SECTION;
+ }
+ else if (Group == "types")
+ {
+ token_type = LLKeywordToken::TT_TYPE;
+ }
- case LLKeywordToken::LINE:
- mLineTokenList.push_front(new LLKeywordToken(type, color, key, tool_tip, LLWStringUtil::null));
- break;
+ if (Tokens.isMap()) // constants, events, functions, and misc
+ {
+ LLSD::map_iterator outerIt = Tokens.beginMap();
+ for ( ; outerIt != Tokens.endMap(); ++outerIt)
+ {
+ if (outerIt->second.isMap())
+ {
+ mAttributes.clear();
+ LLSD arguments = LLSD ();
+ LLSD::map_iterator innerIt = outerIt->second.beginMap();
+ for ( ; innerIt != outerIt->second.endMap(); ++innerIt)
+ {
+ if (innerIt->first != "arguments")
+ {
+ mAttributes[innerIt->first] = innerIt->second.asString();
+ }
+ else if (innerIt->second.isArray())
+ {
+ arguments = innerIt->second;
+ }
+ }
- case LLKeywordToken::TWO_SIDED_DELIMITER:
- case LLKeywordToken::DOUBLE_QUOTATION_MARKS:
- case LLKeywordToken::ONE_SIDED_DELIMITER:
- mDelimiterTokenList.push_front(new LLKeywordToken(type, color, key, tool_tip, delimiter));
- break;
+ std::string tooltip = "";
+ if (token_type == LLKeywordToken::TT_CONSTANT)
+ {
+ Color = getColorGroup(Group + "-" + getAttribute("type"));
+ tooltip = "Type: " + getAttribute("type") + ", Value: " + getAttribute("value");
+ }
+ else if (token_type == LLKeywordToken::TT_EVENT)
+ {
+ tooltip = outerIt->first + "(" + getArguments(arguments) + ")";
+ }
+ else if (token_type == LLKeywordToken::TT_FLOW)
+ {
+ tooltip = "flow baby";
+ }
+ else if (token_type == LLKeywordToken::TT_FUNCTION)
+ {
+ tooltip = getAttribute("return") + " " + outerIt->first + "(" + getArguments(arguments) + ");";
+ tooltip += "\nEnergy: ";
+ tooltip += getAttribute("energy") == "" ? "0.0" : getAttribute("energy");
+ if (getAttribute("sleep") != "")
+ {
+ tooltip += ", Sleep: " + getAttribute("sleep");
+ }
+ }
+ else if (token_type == LLKeywordToken::TT_SECTION)
+ {
+ tooltip = "section";
+ }
- default:
- llassert(0);
+ if (getAttribute("summry") != "")
+ {
+ tooltip += "\n" + getAttribute("summary");
+ }
+ else if (getAttribute("description") != "")
+ {
+ tooltip += "\n" + getAttribute("description");
+ }
+
+ addToken(token_type, outerIt->first, Color, tooltip);
+ }
+ }
+ }
+ else if (Tokens.isArray()) // types
+ {
+ for (int count = 0; count < Tokens.size(); ++count)
+ {
+ addToken(token_type, Tokens[count], Color, "");
+ }
+ }
+ else
+ {
+ LL_INFOS("Tokens") << "Invalid map/array passed: '" << Tokens << "'" << LL_ENDL;
}
}
+
LLKeywords::WStringMapIndex::WStringMapIndex(const WStringMapIndex& other)
{
if(other.mOwner)
@@ -298,13 +538,13 @@ bool LLKeywords::WStringMapIndex::operator<(const LLKeywords::WStringMapIndex &o
{
// NOTE: Since this is only used to organize a std::map, it doesn't matter if it uses correct collate order or not.
// The comparison only needs to strictly order all possible strings, and be stable.
-
+
bool result = false;
const llwchar* self_iter = mData;
const llwchar* self_end = mData + mLength;
const llwchar* other_iter = other.mData;
const llwchar* other_end = other.mData + other.mLength;
-
+
while(true)
{
if(other_iter >= other_end)
@@ -319,7 +559,7 @@ bool LLKeywords::WStringMapIndex::operator<(const LLKeywords::WStringMapIndex &o
{
// self is shorter than other.
result = true;
- break;
+ break;
}
else if(*self_iter != *other_iter)
{
@@ -331,7 +571,7 @@ bool LLKeywords::WStringMapIndex::operator<(const LLKeywords::WStringMapIndex &o
self_iter++;
other_iter++;
}
-
+
return result;
}
@@ -347,6 +587,22 @@ LLColor3 LLKeywords::readColor( const std::string& s )
return LLColor3( r, g, b );
}
+LLColor3 LLKeywords::readColor(LLSD& sd)
+{
+ if (sd.isArray())
+ {
+ return LLColor3 (sd);
+ }
+ else if (sd.isMap())
+ {
+ return LLColor3 ( sd.get("x").asReal(), sd.get("y").asReal(), sd.get("z").asReal() );
+ }
+ else
+ {
+ return LLColor3::black;
+ }
+}
+
LLFastTimer::DeclareTimer FTM_SYNTAX_COLORING("Syntax Coloring");
// Walk through a string, applying the rules specified by the keyword token list and
@@ -360,10 +616,10 @@ void LLKeywords::findSegments(std::vector<LLTextSegmentPtr>* seg_list, const LLW
{
return;
}
-
+
S32 text_len = wtext.size() + 1;
- seg_list->push_back( new LLNormalTextSegment( defaultColor, 0, text_len, editor ) );
+ seg_list->push_back( new LLNormalTextSegment( defaultColor, 0, text_len, editor ) );
const llwchar* base = wtext.c_str();
const llwchar* cur = base;
@@ -398,7 +654,7 @@ void LLKeywords::findSegments(std::vector<LLTextSegmentPtr>* seg_list, const LLW
continue;
}
- // cur is now at the first non-whitespace character of a new line
+ // cur is now at the first non-whitespace character of a new line
// Line start tokens
{
@@ -416,7 +672,7 @@ void LLKeywords::findSegments(std::vector<LLTextSegmentPtr>* seg_list, const LLW
cur++;
}
S32 seg_end = cur - base;
-
+
//create segments from seg_start to seg_end
insertSegments(wtext, *seg_list,cur_token, text_len, seg_start, seg_end, defaultColor, editor);
line_done = TRUE; // to break out of second loop.
@@ -461,14 +717,14 @@ void LLKeywords::findSegments(std::vector<LLTextSegmentPtr>* seg_list, const LLW
seg_start = cur - base;
cur += cur_delimiter->getLengthHead();
-
+
LLKeywordToken::TOKEN_TYPE type = cur_delimiter->getType();
- if( type == LLKeywordToken::TWO_SIDED_DELIMITER || type == LLKeywordToken::DOUBLE_QUOTATION_MARKS )
+ if( type == LLKeywordToken::TT_TWO_SIDED_DELIMITER || type == LLKeywordToken::TT_DOUBLE_QUOTATION_MARKS )
{
while( *cur && !cur_delimiter->isTail(cur))
{
// Check for an escape sequence.
- if (type == LLKeywordToken::DOUBLE_QUOTATION_MARKS && *cur == '\\')
+ if (type == LLKeywordToken::TT_DOUBLE_QUOTATION_MARKS && *cur == '\\')
{
// Count the number of backslashes.
S32 num_backslashes = 0;
@@ -515,7 +771,7 @@ void LLKeywords::findSegments(std::vector<LLTextSegmentPtr>* seg_list, const LLW
}
else
{
- llassert( cur_delimiter->getType() == LLKeywordToken::ONE_SIDED_DELIMITER );
+ llassert( cur_delimiter->getType() == LLKeywordToken::TT_ONE_SIDED_DELIMITER );
// Left side is the delimiter. Right side is eol or eof.
while( *cur && ('\n' != *cur) )
{
@@ -561,7 +817,7 @@ void LLKeywords::findSegments(std::vector<LLTextSegmentPtr>* seg_list, const LLW
insertSegments(wtext, *seg_list,cur_token, text_len, seg_start, seg_end, defaultColor, editor);
}
- cur += seg_len;
+ cur += seg_len;
continue;
}
}
@@ -577,7 +833,7 @@ void LLKeywords::findSegments(std::vector<LLTextSegmentPtr>* seg_list, const LLW
void LLKeywords::insertSegments(const LLWString& wtext, std::vector<LLTextSegmentPtr>& seg_list, LLKeywordToken* cur_token, S32 text_len, S32 seg_start, S32 seg_end, const LLColor4 &defaultColor, LLTextEditor& editor )
{
std::string::size_type pos = wtext.find('\n',seg_start);
-
+
while (pos!=-1 && pos < (std::string::size_type)seg_end)
{
if (pos!=seg_start)
@@ -656,7 +912,7 @@ void LLKeywords::dump()
void LLKeywordToken::dump()
{
- llinfos << "[" <<
+ llinfos << "[" <<
mColor.mV[VX] << ", " <<
mColor.mV[VY] << ", " <<
mColor.mV[VZ] << "] [" <<