47 lines
1.1 KiB
C++
47 lines
1.1 KiB
C++
#include "Lexer.hpp"

#include "TokenList.hpp"

#include <fstream>
#include <iostream>
#include <map>
#include <regex>
#include <string>

#include <utils/src/string.hpp>
|
|
|
|
using namespace Blang;
|
|
|
|
// Maps each token kind to the regex that recognizes it.
// NOTE(review): not referenced by tokenize()/tokenizeFile() in this file —
// presumably populated/used elsewhere or planned; confirm before removing.
static std::map<Token::TokenKind,std::regex> regexMap;
|
|
|
|
/// @brief Scan @p t and produce a TokenList.
///
/// Recognizes (in order of priority at each position):
///   - the keyword "If" (skipped, no token emitted — matches the original code),
///   - double-quoted string literals  -> Token_StringLiteral (inner text, quotes stripped),
///   - space / tab                    -> Token_Whitespace (one token per character).
/// Any other character is skipped.
///
/// Fixes over the previous revision: substr/find were called with iterators
/// instead of positions; the string-literal scan started ON the opening quote
/// (never advanced) and could run past end-of-input; the whitespace test
/// compared size_t find() against 0 (always true); the function never
/// returned its TokenList.
///
/// @param t  source text to tokenize
/// @return   the tokens found in @p t
TokenList Lexer::tokenize(const std::string& t){
    TokenList tokenList;
    const std::string& text = t;

    std::string::size_type pos = 0;
    while (pos < text.size()){
        if (text.compare(pos, 2, "If") == 0){
            // Keyword "If": skip past it (the original emitted no token here).
            pos += 2;
        } else if (text[pos] == '"'){
            // String literal: scan from the character after the opening quote
            // up to the closing quote, or to end-of-input if unterminated.
            const std::string::size_type start = pos + 1;
            std::string::size_type end = text.find('"', start);
            if (end == std::string::npos)
                end = text.size();
            tokenList << Token(Token::Token_StringLiteral,
                               text.substr(start, end - start));
            // Resume after the closing quote (or stop at end-of-input).
            pos = (end < text.size()) ? end + 1 : end;
        } else if (text[pos] == ' ' || text[pos] == '\t'){
            tokenList << Token(Token::Token_Whitespace, std::string(1, text[pos]));
            ++pos;
        } else {
            // Unrecognized character: skip it, as the original loop did.
            ++pos;
        }
    }
    return tokenList;
}
|
|
|
|
/// @brief Read @p filename and tokenize its contents.
///
/// Fixes over the previous revision: `line + std::endl` did not compile
/// (std::endl is a stream manipulator, not a character); the function
/// recursed into tokenizeFile() with the file *contents* as a *filename*
/// instead of calling tokenize(); and neither path returned a value.
///
/// @param filename  path of the file to read
/// @return          tokens from the file's text, or an empty TokenList
///                  (after printing an error) when the file can't be read
TokenList Lexer::tokenizeFile(const std::string& filename){
    std::ifstream fileStream(filename);
    if (!fileStream){
        std::cout << "Error: Unable to read file" << std::endl;
        return TokenList();
    }

    std::string text, line;
    while (std::getline(fileStream, line)){
        text += line;
        text += '\n';   // preserve line boundaries for the tokenizer
    }
    return tokenize(text);
}
|