#include "Lexer.hpp"
#include "TokenList.hpp"
#include <fstream>
#include <iostream>
#include <string>

using namespace Blang;

/// Tokenize a source string into a TokenList.
/// Recognizes: the keyword "If" (skipped, as in the original), double-quoted
/// string literals (emitted without their surrounding quotes), and space/tab
/// whitespace. All other characters are consumed without emitting a token.
/// @param text  source text to scan
/// @return the list of tokens produced
TokenList Lexer::tokenize(const std::string& text){
    TokenList tokenList;
    const std::string whitespaceChars = " \t";
    std::string::size_type pos = 0;

    while (pos < text.size()) {
        if (text.compare(pos, 2, "If") == 0) {
            // Keyword "If": advance exactly past the two characters.
            // (Original advanced by 3 via `itt += 2` plus the loop's `++itt`.)
            pos += 2;
        } else if (text[pos] == '"') {
            // String literal: find the matching closing quote, starting AFTER
            // the opening one (the original loop tested the opening quote and
            // exited immediately, never consuming the literal).
            std::string::size_type end = text.find('"', pos + 1);
            if (end == std::string::npos) {
                end = text.size(); // unterminated literal: take rest of input
            }
            tokenList << Token(Token::Token_StringLiteral,
                               text.substr(pos + 1, end - pos - 1));
            // Skip past the closing quote if there was one.
            pos = (end < text.size()) ? end + 1 : end;
        } else if (whitespaceChars.find(text[pos]) != std::string::npos) {
            // NOTE: must compare against npos — find() returns an unsigned
            // size_type, so `>= 0` was always true in the original.
            tokenList << Token(Token::Token_Whitespace, std::string(1, text[pos]));
            ++pos;
        } else {
            // Unrecognized character: consume without emitting a token,
            // matching the original's (lack of) handling.
            ++pos;
        }
    }
    return tokenList;
}

/// Read an entire file and tokenize its contents.
/// @param filename  path of the file to read
/// @return the token list for the file's text; an empty TokenList if the
///         file could not be opened (an error is reported on stderr)
TokenList Lexer::tokenizeFile(const std::string& filename){
    std::ifstream fileStream(filename);
    if (!fileStream) {
        std::cerr << "Error: Unable to read file" << std::endl;
        return TokenList();
    }

    std::string text;
    std::string line;
    while (std::getline(fileStream, line)) {
        // std::endl is a stream manipulator, not appendable text —
        // append a plain newline instead.
        text += line;
        text += '\n';
    }
    // Was `tokenizeFile(text)` in the original: unbounded recursion
    // (and its result was discarded). Delegate to tokenize().
    return tokenize(text);
}