#include "SqlParser.h" #include "SqlAstSelect.h" #include #include using namespace SqlAst; Keyword isKeyword(const QString &symbol) { static std::unordered_map lookup_map = { { "as", Keyword::As }, { "by", Keyword::By }, { "delete", Keyword::Delete }, { "from", Keyword::From }, { "group", Keyword::Group }, { "insert", Keyword::Insert }, { "order", Keyword::Order }, { "select", Keyword::Select }, { "update", Keyword::Update }, { "where", Keyword::Where } }; auto res = lookup_map.find(symbol.toLower().toUtf8().data()); if (res != lookup_map.end()) return res->second; return Keyword::NotAKeyword; } /* Put tokens on a stack Every time something is put on the stack see if it matches a rule The stack needs to contain both tokens from the lexical analyzer as tokens for reductions done by the parser. Matching rules, as we need to match against the top of the stack we should match the rules end to start. Meaning if we have on the stack A B C then we need to consider rules ending with a C */ class StackItem { public: int Token; }; SqlParser::SqlParser(SqlLexer &lexer) : lexer(lexer) { } std::shared_ptr SqlParser::parse() { // Basic algo: // LOOP // GET token // IF NOT try_reduce(token) // THEN SHIFT // END LOOP std::shared_ptr result; while (true) { SqlToken token = lexer.nextBasicToken(); if (token.ok) { if (token.tokenType == BasicTokenType::Symbol) { Keyword kw = isKeyword(token.out); switch (kw) { case Keyword::Select: parseSelect(*this); break; case Keyword::NotAKeyword: default: // unexpected break; } } else if (token.tokenType == BasicTokenType::End) { // Are we at the top level? return result; } } else { // error during lexical analysis, need to recover throw std::runtime_error("Unrecognized input"); } } } //bool try_reduce(SqkToken token) //{ // // what state are we in? what are we expecting //}