#include <iostream>
#include <fstream>
#include <sstream>
#include <string>
#include <vector>
#include <map>
#include <deque>

#include "Tokenizer.h"
#include "TAOConstructor.h"
#include "ParserExceptions.h"

using namespace std;
using namespace tao_constructor;

// Debug trace macro; remove the '//' below to enable parser tracing.
#define PRINT(X) //cout<<"parser: "<<X<<endl
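// TAOParser tokenizes a C++ source file written with the TAO macro DSL and
// feeds the recognized structure into a TAOConstructor. The input shape below
// is a sketch inferred from the keyword table and the parse routines in this
// file (the authoritative macro syntax lives in the TAO headers); all
// plan/task/protocol names are placeholders:
//
//   TAO(MyMission)
//   {
//       TAO_PLANS { PlanA, PlanB };
//       TAO_START_PLAN(PlanA);
//       TAO_BGN
//       {
//           TAO_PLAN(PlanA)
//           {
//               TAO_START_CONDITION(/* ... */);
//               TAO_CALL_TASK(someTask);
//               TAO_ALLOCATE(someProtocol) { TAO_SUBPLAN(PlanB); }
//               TAO_STOP_CONDITION(/* ... */);
//               TAO_NEXT(someProtocol) { TAO_NEXT_PLAN(PlanB); }
//           }
//       }
//       TAO_END
//   }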
class TAOParser {
public:

    enum TokenType {
        tkn_null = 0,
        tkn_text = 1,

        tkn_tao,
        tkn_tao_behs,
        tkn_tao_start_beh,
        tkn_tao_behs_bgn,
        tkn_tao_beh,
        tkn_tao_start,
        tkn_tao_call_task,
        tkn_tao_alloc,
        tkn_tao_role,
        tkn_tao_stop,
        tkn_tao_next,
        tkn_tao_next_op,
        tkn_tao_behs_end,

        tkn_bopen,
        tkn_bclose,
        tkn_fopen,
        tkn_fclose,
        tkn_next,
        tkn_semicol,
        tkn_col,
        tkn_slesh,
    };

    typedef Parser::TokenStream<TokenType> tstream;
    typedef Parser::Token<TokenType> Token;

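    // TokenizerInit tells the tokenizer which TAO macro keywords and
    // punctuation characters map to which token types, and which characters
    // may appear inside an identifier token.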
    struct TokenizerInit {
        void init(Parser::TokenizerData<TokenType>& tkn){
            tkn.string_token["TAO"]=tkn_tao;
            tkn.string_token["TAO_PLANS"]=tkn_tao_behs;
            tkn.string_token["TAO_START_PLAN"]=tkn_tao_start_beh;
            tkn.string_token["TAO_BGN"]=tkn_tao_behs_bgn;
            tkn.string_token["TAO_PLAN"]=tkn_tao_beh;
            tkn.string_token["TAO_START_CONDITION"]=tkn_tao_start;
            tkn.string_token["TAO_CALL_TASK"]=tkn_tao_call_task;
            tkn.string_token["TAO_ALLOCATE"]=tkn_tao_alloc;
            tkn.string_token["TAO_SUBPLAN"]=tkn_tao_role;
            tkn.string_token["TAO_STOP_CONDITION"]=tkn_tao_stop;
            tkn.string_token["TAO_NEXT"]=tkn_tao_next;
            tkn.string_token["TAO_NEXT_PLAN"]=tkn_tao_next_op;
            tkn.string_token["TAO_END"]=tkn_tao_behs_end;

            tkn.spec_token['(']=tkn_bopen;
            tkn.spec_token[')']=tkn_bclose;
            tkn.spec_token['{']=tkn_fopen;
            tkn.spec_token['}']=tkn_fclose;
            tkn.spec_token[',']=tkn_col;
            tkn.spec_token[';']=tkn_semicol;
            tkn.spec_token['/']=tkn_slesh;
        }
        bool isTokenSumbol(char c){
            return
                ('0'<=c and c<='9') or
                ('a'<=c and c<='z') or
                ('A'<=c and c<='Z') or
                (c=='_')
            ;
        }
        bool isDelimiter(char c){
            return not isTokenSumbol(c);
        }
        bool isFlowControl(char c){
            return c==10 or c==13;
        }
    };

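    // Mutable state threaded through the character-reading loop in main():
    // the tokenizer itself, the accumulated full text, the current token
    // buffer, and the read positions.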
    struct TokenizerContext {
        Parser::Tokenizer<TokenType, TokenizerInit> tokenizer;
        std::stringstream ss_fullText;
        stringstream buffer;
        size_t start_index, index;
        ifstream& file;
        char c;

        TokenizerContext(ifstream& file)
            : start_index(0), index(0), file(file), c(0)
        { }
    };

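    // Skips single-line '//' comments. On a lone '/', the pending token is
    // flushed and the freshly read character becomes the current one. Block
    // comments (/* ... */) are not handled.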
    bool skipComments( TokenizerContext& ctx ) {
        if(ctx.c=='/' and ctx.file.eof()==false){
            char cc;
            // Record the peeked character through saveReadedChar so a
            // newline here still lands in the line table.
            ctx.file.read(&cc, 1); saveReadedChar(ctx, cc);
            if(cc=='/'){
                ctx.index++;
                // Consume the rest of the comment line.
                while(cc!='\n' and ctx.file.eof()==false){
                    ctx.file.read(&cc, 1); saveReadedChar(ctx, cc); ctx.index++;
                }
                ctx.index++;
                return true;
            }
            else{
                // Not a comment: flush the pending '/' and continue with the
                // character just read.
                ctx.tokenizer.searchToken(ctx.index++, ctx.buffer, ctx.start_index, ctx.c, tokens);
                ctx.c=cc;
            }
        }
        return false;
    }

    void saveReadedChar(TokenizerContext& ctx , char c){
        ctx.ss_fullText<<c;
        if(c=='\n') lines.push_back(ctx.index);
    }

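    // Maps a character index in the input to a 1-based line number and an
    // offset within that line, using the newline positions collected by
    // saveReadedChar().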
    void searchLineInfo(int i, int& line, int& pos){
        line=1;
        pos=i;
        for(size_t n=0;n<lines.size();n++){
            if(i<lines[n]){
                // On line 1 the offset is the index itself; otherwise it is
                // measured from the preceding newline.
                if(line>1) pos = i-lines[line-2];
                return;
            }
            line = n+2;
        }
        // i lies beyond the last recorded newline.
        if(line>1) pos = i-lines[line-2];
    }

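    // Returns the source spelling of a token type (keyword or punctuation)
    // for use in error messages; empty string if the type has no fixed
    // spelling (e.g. tkn_text).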
    string translateToken(TokenType tokenType) {
        for (map<string, TokenType>::iterator it = _stringTokens.begin();
             it != _stringTokens.end();
             ++it)
        {
            if (it->second == tokenType)
                return it->first;
        }

        for (map<char, TokenType>::iterator it = _specTokens.begin();
             it != _specTokens.end();
             ++it)
        {
            if (it->second == tokenType)
                return string(1, it->first);
        }

        return "";
    }

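    // Throws ExceptionType carrying the line/position of the offending
    // token; the overloads differ only in how many message parameters the
    // exception constructor takes.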
    template <typename ExceptionType>
    void throwException(Token token) {
        int line;
        int position;

        searchLineInfo(token.start, line, position);

        throw ExceptionType("", line, position);
    }

    template <typename ExceptionType>
    void throwException(Token token, string param) {
        int line;
        int position;

        searchLineInfo(token.start, line, position);

        throw ExceptionType(param, line, position);
    }

    template <typename ExceptionType>
    void throwException(Token token, string param1, string param2) {
        int line;
        int position;

        searchLineInfo(token.start, line, position);

        throw ExceptionType(param1, param2, line, position);
    }

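    // Low-level token-stream primitives shared by all parse routines.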
    bool testToken(tstream& stream, TokenType tokenType) {
        return stream.last().type == tokenType;
    }

    string parseText(tstream& stream) {
        return parseToken(stream, tkn_text).text;
    }

    Token parseToken(tstream& stream, TokenType tokenType) {
        if (stream.eof())
            throwException<UnexpectedEndOfFile>(stream.first());

        Token token;
        stream >> token;

        if (token.type != tokenType)
            throwException<UnexpectedToken>(token, token.text, translateToken(tokenType));

        return token;
    }

    size_t findClosingBracket(tstream& stream, TokenType openToken, TokenType closeToken) {
        size_t foundIndex = tkn_search_close_parent(stream, openToken, closeToken);

        if (foundIndex == size_t(-1))
            throw ClosingBracketNotFound();

        return foundIndex;
    }

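    // Reads a balanced ( ... ) or { ... } group and returns its raw text.
    // The content is re-extracted from fullText so that whitespace between
    // tokens is preserved.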
    string parseBracketContent(tstream& stream) {
        Token tkn;
        TokenType openingBracket = tkn_null;
        TokenType closingBracket = tkn_null;
        stream >> tkn;

        if (tkn.type == tkn_bopen) {
            openingBracket = tkn.type;
            closingBracket = tkn_bclose;
        } else if (tkn.type == tkn_fopen) {
            openingBracket = tkn.type;
            closingBracket = tkn_fclose;
        } else
            throwException<UnexpectedToken>(tkn, tkn.text, "(' or '{");

        size_t foundIndex = findClosingBracket(stream, openingBracket, closingBracket);

        string resultString;
        long startPosition = -1;
        long endPosition = -1;

        // Concatenate all tokens up to (but not including) the closing
        // bracket, remembering where the content starts in the original text.
        while (foundIndex > stream.i + 1) {
            stream >> tkn;
            resultString += tkn.text;

            if (startPosition < 0)
                startPosition = tkn.start;
        }

        endPosition = tkn.end;

        // Consume the closing bracket itself.
        stream >> tkn;

        if (startPosition >= 0 and endPosition > startPosition)
            resultString = fullText.substr(startPosition, endPosition - startPosition);

        return resultString;
    }

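    // Advances the stream to the next token of the given type. Returns true
    // if found; otherwise either throws (throwOnNotFound) or returns false
    // at end of stream.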
    bool skipTo(tstream& stream, TokenType targetToken, bool throwOnNotFound = true) {
        Token tkn;

        while (!stream.eof()) {
            if (stream.first().type == targetToken)
                return true;

            stream >> tkn;
        }

        if (throwOnNotFound)
            throw UnexpectedEndOfFile(translateToken(targetToken));

        return false;
    }

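    // Parses "KEYWORD( ... )" (or "KEYWORD{ ... }") and returns the bracket
    // content as text.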
    string parseMacroCall(tstream& stream, TokenType expectedToken) {
        parseToken(stream, expectedToken);
        return parseBracketContent(stream);
    }

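    // Per-macro handlers. Each one parses a single TAO_* macro call and
    // stores the result on the TAOConstructor's current TAO/plan.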
    void parseTaoStartBeh(tstream& stream) {
        string startBehName = parseMacroCall(stream, tkn_tao_start_beh);
        constructor.currentTao().start = startBehName;
    }

    void parseStart(tstream& stream) {
        string startBehConditions = parseMacroCall(stream, tkn_tao_start);
        constructor.currentTao().currentBeh().start = startBehConditions;
    }

    void parseCallTask(tstream& stream) {
        string callTaskName = parseMacroCall(stream, tkn_tao_call_task);
        constructor.currentTao().currentBeh().task_calls.push_back(callTaskName);
    }

    void parseRole(tstream& stream) {
        string taoRole = parseMacroCall(stream, tkn_tao_role);
        constructor.currentTao().currentBeh().alloc.roles.push_back(taoRole);
    }

    void parseAlloc(tstream& stream) {
        parseToken(stream, tkn_tao_alloc);
        string protocol = parseBracketContent(stream);
        constructor.currentTao().currentBeh().alloc.protocol = protocol;
        parseToken(stream, tkn_fopen);

        while (!stream.eof()) {
            Token tkn = stream.first();

            switch(tkn.type) {
            case tkn_tao_role:
                parseRole(stream);
                break;
            case tkn_fclose:
                stream >> tkn;
                return;
            default:
                stream >> tkn;
                break;
            }
        }
    }

    void parseStop(tstream& stream) {
        string stopCondition = parseMacroCall(stream, tkn_tao_stop);
        constructor.currentTao().currentBeh().stop = stopCondition;
    }

    void parseNextOp(tstream& stream) {
        string nextOp = parseMacroCall(stream, tkn_tao_next_op);
        constructor.currentTao().currentBeh().next.next_ops.push_back(nextOp);
    }

    void parseNext(tstream& stream) {
        string protocol = parseMacroCall(stream, tkn_tao_next);
        constructor.currentTao().currentBeh().next.protocol = protocol;
        parseToken(stream, tkn_fopen);

        while (!stream.eof()) {
            Token tkn = stream.first();

            switch(tkn.type) {
            case tkn_tao_next_op:
                parseNextOp(stream);
                break;
            case tkn_fclose:
                stream >> tkn;
                return;
            default:
                stream >> tkn;
                break;
            }
        }
    }

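    // Parses one TAO_PLAN(name){ ... } block. The plan is committed with
    // add() when its closing brace is reached; if the stream ends first it
    // is discarded with drop().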
    void parseBeh(tstream& stream) {
        parseToken(stream, tkn_tao_beh);
        string behName = parseBracketContent(stream);
        parseToken(stream, tkn_fopen);

        constructor.currentTao().createBeh(behName);

        size_t endOfBeh = findClosingBracket(stream, tkn_fopen, tkn_fclose);

        while (!stream.eof()) {
            Token tkn = stream.first();

            if (endOfBeh == stream.i) {
                constructor.currentTao().add();
                return;
            }

            switch(tkn.type) {
            case tkn_tao_start:
                parseStart(stream);
                break;
            case tkn_tao_call_task:
                parseCallTask(stream);
                break;
            case tkn_tao_alloc:
                parseAlloc(stream);
                break;
            case tkn_tao_stop:
                parseStop(stream);
                break;
            case tkn_tao_next:
                parseNext(stream);
                break;
            default:
                stream >> tkn;
                break;
            }
        }

        constructor.currentTao().drop();
    }

    void parseTaoBehs(tstream& stream) {
        parseToken(stream, tkn_fopen);

        while (!stream.eof()) {
            Token tkn = stream.first();

            switch(tkn.type) {
            case tkn_tao_beh:
                parseBeh(stream);
                break;
            case tkn_fclose:
                stream >> tkn;
                return;
            default:
                stream >> tkn;
                break;
            }
        }
    }

    void parseTaoBehsBgn(tstream& stream) {
        parseToken(stream, tkn_tao_behs_bgn);
        parseTaoBehs(stream);
    }

    void parseTaoBehsDeclaration(tstream& stream) {
        // The TAO_PLANS list is read but its content is not used; the plans
        // themselves are collected from the TAO_PLAN blocks.
        parseToken(stream, tkn_tao_behs);
        string behs = parseBracketContent(stream);
    }

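    // Parses one complete TAO definition: the TAO(name) header, the
    // TAO_PLANS declaration, the TAO_START_PLAN selection and the TAO_BGN
    // block with all plans.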
    void parseTao(tstream& stream){
        _curr_tao_name = parseMacroCall(stream, tkn_tao);
        parseToken(stream, tkn_fopen);

        constructor.currentTao().name = _curr_tao_name;

        skipTo(stream, tkn_tao_behs, true);
        parseTaoBehsDeclaration(stream);

        skipTo(stream, tkn_tao_start_beh, true);
        parseTaoStartBeh(stream);

        skipTo(stream, tkn_tao_behs_bgn, true);
        parseTaoBehsBgn(stream);
    }

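    // Parses a single TAO and commits it; on a parser error the partial TAO
    // is dropped and the error is recorded instead of propagated, so the
    // remaining TAOs in the file can still be parsed.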
    void parse(tstream& stream) {
        try {
            constructor.create();
            parseTao(stream);
            constructor.add();

        } catch (ParserException& e) {
            constructor.drop();
            errors << "Exception occurred in file '" << filename << "'" << endl;
            errors << e.what() << endl;
        }
    }

    // Parser state. Note: 'errors' is declared before 'constructor' so it is
    // initialized first; TAOConstructor holds a reference to it.
    string filename;                      // input file name
    std::string fullText;                 // complete input text
    tstream tokens;                       // token stream produced in main()
    std::stringstream errors;             // accumulated error messages
    TAOConstructor constructor;           // receives the parsed structure
    std::vector<int> lines;               // indices of newline characters
    Token _curr_tao;
    std::string _curr_tao_name;
    Token _curr_state;
    std::string _curr_state_name;

    map<string, TokenType> _stringTokens; // keyword -> token type (for messages)
    map<char, TokenType> _specTokens;     // punctuation -> token type (for messages)

    TAOParser(string file):filename(file), constructor(errors, file){}

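    // Entry point: phase 1 reads the file character by character, building
    // the token stream and the full-text buffer; phase 2 scans the token
    // stream for TAO definitions and parses each one.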
    TAOConstructor& main()
    {
        PRINT(__TIME__);
        { // Phase 1: tokenize the input file.
            PRINT("Read input file");
            ifstream file(filename.c_str());
            TokenizerContext ctx(file);

            _stringTokens = ctx.tokenizer.string_token;
            _specTokens = ctx.tokenizer.spec_token;

            if(ctx.file.is_open()){
                PRINT("File ("<<filename<<") is open");

                // The loop condition checks the read itself, so the last
                // character is not processed twice at end of file.
                while(ctx.file.read(&ctx.c, 1)){
                    saveReadedChar(ctx, ctx.c);

                    if (skipComments(ctx))
                        continue;

                    ctx.tokenizer.searchToken(ctx.index++, ctx.buffer, ctx.start_index, ctx.c, tokens);
                }

                PRINT("End of file");
                fullText = ctx.ss_fullText.str();
                file.close();
            }else{
                throw PEFileNotFound(filename);
            }
        }

        { // Phase 2: parse every TAO definition found in the token stream.
            while(not tokens.eof()){
                if (skipTo(tokens, tkn_tao, false))
                    parse(tokens);
            }
        }
        return constructor;
    }

};

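// Minimal C-style facade around TAOParser. Usage sketch (the file name is
// hypothetical):
//
//   TAOParser* p = createTAO("plans/my_tao.cpp");
//   TAOConstructor& result = parseTAO(p);
//   // ... use result while p is alive; it references parser state ...
//   del(p);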
TAOParser* createTAO(string filename) {
    return new TAOParser(filename);
}

TAOConstructor& parseTAO(TAOParser* p) {
    return p->main();
}

void del(TAOParser* tao) {
    delete tao;
}