/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.stevestreeting.com/ogre/

Copyright (c) 2000-2005 The OGRE Team
Also see acknowledgements in Readme.html

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA, or go to
http://www.gnu.org/copyleft/gpl.html.
-----------------------------------------------------------------------------
*/


#ifndef __Compiler2Pass_H__
#define __Compiler2Pass_H__

#include <map>
#include <vector>
#include "OgrePrerequisites.h"

namespace Ogre {

    /** Abstract base class for a two-pass compiler: pass 1 scans the source and
        builds a token queue while checking it against a BNF grammar, pass 2 walks
        the token queue and executes the token actions supplied by the derived
        (client) class.
    */
    class _OgreExport Compiler2Pass
    {

    protected:

        // BNF operation types
        enum OperationType {otUNKNOWN, otRULE, otAND, otOR, otOPTIONAL, otREPEAT, otDATA, otNOT_TEST, otEND};

        /// structure used to build rule paths
        struct TokenRule
        {
            OperationType operation;
            size_t tokenID;

            TokenRule(void) : operation(otUNKNOWN), tokenID(0) {}
            TokenRule(const OperationType ot, const size_t token)
                : operation(ot), tokenID(token) {}
        };

        typedef std::vector<TokenRule> TokenRuleContainer;
        typedef TokenRuleContainer::iterator TokenRuleIterator;

        static const size_t SystemTokenBase = 1000;
        enum SystemRuleToken {
            _no_token_ = SystemTokenBase,
            _character_,
            _value_,
            _no_space_skip_
        };

        // token IDs used internally by the BNF grammar compiler
        enum BNF_ID {BNF_UNKOWN = 0,
            BNF_SYNTAX, BNF_RULE, BNF_IDENTIFIER, BNF_IDENTIFIER_RIGHT, BNF_IDENTIFIER_CHARACTERS, BNF_ID_BEGIN, BNF_ID_END,
            BNF_CONSTANT_BEGIN, BNF_SET_RULE, BNF_EXPRESSION,
            BNF_AND_TERM, BNF_OR_TERM, BNF_TERM, BNF_TERM_ID, BNF_CONSTANT, BNF_OR, BNF_TERMINAL_SYMBOL, BNF_TERMINAL_START,
            BNF_REPEAT_EXPRESSION, BNF_REPEAT_BEGIN, BNF_REPEAT_END, BNF_SET, BNF_SET_BEGIN, BNF_SET_END,
            BNF_NOT_TEST, BNF_NOT_TEST_BEGIN, BNF_OPTIONAL_EXPRESSION, BNF_NOT_EXPRESSION, BNF_NOT_CHK,
            BNF_OPTIONAL_BEGIN, BNF_OPTIONAL_END, BNF_NO_TOKEN_START, BNF_SINGLEQUOTE, BNF_SINGLE_QUOTE_EXC, BNF_SET_END_EXC,
            BNF_ANY_CHARACTER, BNF_SPECIAL_CHARACTERS1,
            BNF_SPECIAL_CHARACTERS2, BNF_WHITE_SPACE_CHK,

            BNF_LETTER, BNF_LETTER_DIGIT, BNF_DIGIT, BNF_WHITE_SPACE,
            BNF_ALPHA_SET, BNF_NUMBER_SET, BNF_SPECIAL_CHARACTER_SET1,
            BNF_SPECIAL_CHARACTER_SET2, BNF_SPECIAL_CHARACTER_SET3, BNF_NOT_CHARS
        };


        /// definition of a lexeme and the token it maps to
        struct LexemeTokenDef
        {
            size_t ID;
            bool hasAction;
            bool isNonTerminal;
            size_t ruleID;
            bool isCaseSensitive;
            String lexeme;

            LexemeTokenDef(void) : ID(0), hasAction(false), isNonTerminal(false), ruleID(0), isCaseSensitive(false) {}
            LexemeTokenDef( const size_t ID, const String& lexeme,
                const bool hasAction = false, const bool caseSensitive = false )
                : ID(ID)
                , hasAction(hasAction)
                , isNonTerminal(false)
                , ruleID(0)
                , isCaseSensitive(caseSensitive)
                , lexeme(lexeme)
            {
            }

        };

        typedef std::vector<LexemeTokenDef> LexemeTokenDefContainer;
        typedef LexemeTokenDefContainer::iterator LexemeTokenDefIterator;

        typedef std::map<std::string, size_t> LexemeTokenMap;
        typedef LexemeTokenMap::iterator TokenKeyIterator;


        /// token instruction generated during pass 1
        struct TokenInst
        {
            size_t NTTRuleID;
            size_t tokenID;
            size_t line;
            size_t pos;
            bool found;
        };

        typedef std::vector<TokenInst> TokenInstContainer;
        typedef TokenInstContainer::iterator TokenInstIterator;

        // token queue, definitions, rules
        struct TokenState
        {
            TokenInstContainer tokenQue;
            LexemeTokenDefContainer lexemeTokenDefinitions;
            TokenRuleContainer rootRulePath;
            LexemeTokenMap lexemeTokenMap;
        };

        TokenState* mClientTokenState;

        /// token state currently being used by the compiler
        TokenState* mActiveTokenState;
        /// current position in the token queue during pass 2
        size_t mPass2TokenQuePosition;
        /// queue position of the previous token that triggered an action
        size_t mPreviousActionQuePosition;

        /// source text to be compiled
        const String* mSource;
        /// name of the source, used for error reporting
        String mSourceName;
        size_t mEndOfSource;

        size_t mCurrentLine;
        size_t mCharPos;

        /// numeric constants extracted from the source
        std::map<size_t, float> mConstants;
        /// text labels extracted from the source
        std::map<size_t, String> mLabels;
        /// true while a label is being built
        bool mLabelIsActive;
        /// key of the label currently being built
        size_t mActiveLabelKey;
        /// when true, spaces are not skipped while scanning
        bool mNoSpaceSkip;
        bool mNoTerminalToken;

        /// active context bit pattern used to determine which tokens are valid
        uint mActiveContexts;

        /** Pass 1: scan the source for lexemes, tokenize them and check them
            against the root rule path of the active grammar.
        */
        bool doPass1();

        /** Pass 2: step through the token queue built by pass 1 and execute
            the actions associated with the tokens.
        */
        bool doPass2();

        /// execute the action associated with the given token; implemented by the client
        virtual void executeTokenAction(const size_t tokenID) = 0;
        /// register the client's lexeme/token definitions; implemented by the client
        virtual void setupTokenDefinitions(void) = 0;
        const TokenInst& getNextToken(const size_t expectedTokenID = 0);
        const TokenInst& getCurrentToken(void);
        bool testNextTokenID(const size_t expectedTokenID);
        void replaceToken(void);
        float getNextTokenValue(void);
        const String& getNextTokenLabel(void);
        size_t getPass2TokenQueCount(void) const;
        size_t getRemainingTokensForAction(void) const;

        /// add a lexeme/token pair to the client token state
        void addLexemeToken(const String& lexeme, const size_t token, const bool hasAction = false, const bool caseSensitive = false);

        /// compile the client's BNF grammar into rule paths
        void setClientBNFGrammer(void);


        void findEOL();
        bool isFloatValue(float& fvalue, size_t& charsize) const;
        bool isCharacterLabel(const size_t rulepathIDX);
        bool isLexemeMatch(const String& lexeme, const bool caseSensitive) const;
        bool positionToNextLexeme();
        bool processRulePath(size_t rulepathIDX);


        void setActiveContexts(const uint contexts){ mActiveContexts = contexts; }

        void skipComments();
        void skipEOL();
        void skipWhiteSpace();

        bool ValidateToken(const size_t rulepathIDX, const size_t activeRuleID);
        void verifyTokenRuleLinks(const String& grammerName);
        void checkTokenActionTrigger(void);
        String getBNFGrammerTextFromRulePath(size_t ruleID);


    private:
        // used for interpreting BNF script;
        // kept static so that only one structure is created
        // no matter how many times this class is instantiated.
        static TokenState mBNFTokenState;

        // map of client grammar token states, keyed by grammar name
        typedef std::map<String, TokenState> TokenStateContainer;
        static TokenStateContainer mClientTokenStates;

        void activatePreviousTokenAction(void);
        void initBNFCompiler(void);
        void buildClientBNFRulePaths(void);
        void modifyLastRule(const OperationType pendingRuleOp, const size_t tokenID);
        size_t getClientLexemeTokenID(const String& lexeme, const bool isCaseSensitive = false);
        void extractNonTerminal(const OperationType pendingRuleOp);
        void extractTerminal(const OperationType pendingRuleOp, const bool notoken = false);
        void extractSet(const OperationType pendingRuleOp);
        void extractNumericConstant(const OperationType pendingRuleOp);
        String getLexemeText(size_t& ruleID);

    public:

        Compiler2Pass();
        virtual ~Compiler2Pass() {}

        /** Compile the given source against the client grammar.
        @param source the text to compile
        @param sourceName name of the source, used in error messages
        @return true if compilation succeeds
        */
        bool compile(const String& source, const String& sourceName);

        /// return the BNF grammar text defined by the client; implemented by the client
        virtual const String& getClientBNFGrammer(void) = 0;

        /// return the name of the client grammar; implemented by the client
        virtual const String& getClientGrammerName(void) = 0;

    };

}

#endif
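For orientation, below is a minimal sketch of how a client compiler might derive from Compiler2Pass. The class name, token IDs, grammar string, and action body are illustrative assumptions and are not part of this header; a real client (such as OGRE's material script compiler) supplies its own BNF grammar and token actions.

// Illustrative sketch only -- not part of OgreCompiler2Pass.h.
// All names, IDs, and the grammar string below are hypothetical.
#include "OgreCompiler2Pass.h"

namespace Ogre {

    class ExampleColourCompiler : public Compiler2Pass
    {
    public:
        // hypothetical client token IDs (kept below SystemTokenBase)
        enum TokenID { ID_UNKNOWN = 0, ID_COLOUR };

        const String& getClientBNFGrammer(void)
        {
            // hypothetical grammar: the keyword 'colour' followed by three numbers
            static String grammar = "<Script> ::= {<Colour>}\n"
                                    "<Colour> ::= 'colour' <#r> <#g> <#b>\n";
            return grammar;
        }

        const String& getClientGrammerName(void)
        {
            static String name = "ExampleColourGrammar";
            return name;
        }

    protected:
        // register the lexemes this compiler recognises; pass 1 matches them in the source
        void setupTokenDefinitions(void)
        {
            addLexemeToken("colour", ID_COLOUR, true);
        }

        // called during pass 2 for each token that was registered with an action
        void executeTokenAction(const size_t tokenID)
        {
            if (tokenID == ID_COLOUR)
            {
                // pull the three numeric values that followed the keyword
                float r = getNextTokenValue();
                float g = getNextTokenValue();
                float b = getNextTokenValue();
                (void)r; (void)g; (void)b; // a real compiler would store these
            }
        }
    };
}

A caller would then construct the derived compiler and invoke compile(scriptText, "example.script"); compile() drives both passes, with the overridden token actions executed during pass 2.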