// neopb/neopb.cpp
#include <fstream>
#include <iostream>
#include <regex>
#include <string>
#include <unordered_map>
#include <vector>
#include "tokenizer.hpp"
std::vector<PBToken> tokenize(std::string code);
// Reads the entire contents of the file named `fname` into a string.
// Returns an empty string when the file cannot be opened or is empty
// (the caller in main() treats missing input as "no code").
//
// Fix: take the filename by const reference instead of by value to
// avoid an unnecessary copy of the string on every call.
std::string readFileContents(const std::string& fname) {
    std::ifstream ifs(fname);
    // istreambuf_iterator reads raw characters without whitespace
    // skipping; the extra parentheses around the first argument avoid
    // the most vexing parse.
    std::string contents( (std::istreambuf_iterator<char>(ifs)),
                          (std::istreambuf_iterator<char>()) );
    return contents;
}
int main(int argc, char* argv[]) {
for (int n = 0; n < argc; n++) {
std::cout << "arg" << n << ": " << argv[n] << std::endl;
}
std::string infile = argc > 1 ? argv[1] : "";
std::string code = "";
if(infile.length() > 0) {
code = readFileContents(infile);
}
Tokenizer tokenizer = Tokenizer(code);
std::cout << "code: " << tokenizer.dump() << std::endl;
std::vector<PBToken> tokens = tokenizer.tokenize();
for(int i = 0; i < tokens.size(); i++) {
std::cout << tokens[i].value << std::endl;
}
return 0;
}
// NOTE(review): commented-out draft of a free-function tokenizer, kept for
// reference. Regex escapes corrected below: in a C++ string literal "\b" is a
// backspace character, so the regex word boundary must be written "\\b", and
// literal parentheses must be "\\(" / "\\)". Remaining issues are flagged
// inline with NOTE(review) comments.
//std::vector<PBToken> tokenize(std::string code) {
// const PBToken tokenize_one = [](std::string fragment) { // NOTE(review): a lambda is not a PBToken; use `const auto tokenize_one = ...`
// //const std::unordered_map<PBTokenType, std::regex> tokentypes = std::unordered_map();
// const std::regex re_func("\\bfunction\\b", std::regex_constants::icase);
// const std::regex re_sub( "\\bsub\\b", std::regex_constants::icase);
// const std::regex re_end( "\\bend\\b", std::regex_constants::icase);
// const std::regex re_as("\\bas\\b", std::regex_constants::icase);
// const std::regex re_type("\\blong\\b", std::regex_constants::icase);
// const std::regex re_identifier("\\b[a-zA-Z]+\\b");
// const std::regex re_integer("\\b[0-9]+\\b");
// const std::regex re_string("\".*\"");
// const std::regex re_oparen("\\(");
// const std::regex re_cparen("\\)");
// const std::regex re_comma(",");
// const std::regex re_quote("'");
// const std::regex re_equals("=");
//
// PBTokenType tt = SUB;
// std::string val = fragment.trim(); // NOTE(review): std::string has no trim() member; strip whitespace manually
//
//
//
// return { .type = tt, .value = val };
// };
// std::vector<PBToken> tokens(); // NOTE(review): most vexing parse — this declares a function; write `std::vector<PBToken> tokens;`
// while(code.length() > 0) { // NOTE(review): loop body never shortens `code`, so this would run forever
// int split = code.find(' ');
// std::string fragment = split > 0 ? code.substr(0, split) : code;
// tokens.push_back(fragment); // NOTE(review): pushes a std::string into a vector<PBToken>; should push tokenize_one(fragment)
// }
// return tokens;
//}