// pgLab/core/SqlParser.cpp

#include "SqlParser.h"
#include "SqlAstSelect.h"
#include <stdexcept>
#include <unordered_map>
using namespace SqlAst;
Keyword isKeyword(const QString &symbol)
{
    static const std::unordered_map<std::string, Keyword> lookup_map = {
        { "as",     Keyword::As },
        { "by",     Keyword::By },
        { "delete", Keyword::Delete },
        { "from",   Keyword::From },
        { "group",  Keyword::Group },
        { "insert", Keyword::Insert },
        { "order",  Keyword::Order },
        { "select", Keyword::Select },
        { "update", Keyword::Update },
        { "where",  Keyword::Where }
    };

    // Keywords are case-insensitive; normalize before the lookup.
    auto res = lookup_map.find(symbol.toLower().toStdString());
    if (res != lookup_map.end())
        return res->second;

    return Keyword::NotAKeyword;
}
/*
  Parsing approach:
  Put tokens on a stack. Every time something is pushed, check whether the
  top of the stack matches a grammar rule. The stack has to hold both tokens
  coming from the lexical analyzer and tokens produced by reductions in the
  parser. Because matching happens against the top of the stack, rules are
  matched end to start: if the stack holds A B C, only rules ending in C
  need to be considered (see the sketch below StackItem).
*/
// An entry on the parse stack: either a token from the lexer or the result
// of a reduction.
class StackItem {
public:
    int Token;
};
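
// A minimal sketch of the end-to-start rule matching described above. The
// representation of a rule as a plain sequence of token ids is an assumption
// for illustration; the real grammar rules are not defined in this file.
#include <vector> // only needed by the sketches in this file

static bool matchesRuleTop(const std::vector<StackItem> &stack,
                           const std::vector<int> &rule)
{
    if (rule.size() > stack.size())
        return false;

    // Walk both sequences back to front, so a stack holding A B C is only
    // ever compared against rules ending in C.
    auto s = stack.rbegin();
    for (auto r = rule.rbegin(); r != rule.rend(); ++r, ++s) {
        if (s->Token != *r)
            return false;
    }
    return true;
}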
SqlParser::SqlParser(SqlLexer &lexer)
    : lexer(lexer)
{
}
std::shared_ptr<SqlAst::Node> SqlParser::parse()
{
    // Basic algorithm:
    //   LOOP
    //     GET token
    //     IF NOT try_reduce(token)
    //     THEN SHIFT
    //   END LOOP

    std::shared_ptr<SqlAst::Node> result;

    while (true) {
        SqlToken token = lexer.nextBasicToken();
        if (token.ok) {
            if (token.tokenType == BasicTokenType::Symbol) {
                Keyword kw = isKeyword(token.out);
                switch (kw) {
                case Keyword::Select:
                    // Hand the rest of the statement to the SELECT parser.
                    parseSelect(*this);
                    break;
                case Keyword::NotAKeyword:
                default:
                    // unexpected token, not handled yet
                    break;
                }
            }
            else if (token.tokenType == BasicTokenType::End) {
                // Are we at the top level?
                // Nothing assigns result yet, so an empty node is returned.
                return result;
            }
        }
        else {
            // Error during lexical analysis; recovery is not implemented yet,
            // so just bail out.
            throw std::runtime_error("Unrecognized input");
        }
    }
}
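
// Hypothetical usage, assuming SqlLexer can be constructed from a QString
// (its real constructor lives in SqlLexer.h and is not shown here):
//
//     SqlLexer lexer(QStringLiteral("select id from users"));
//     SqlParser parser(lexer);
//     std::shared_ptr<SqlAst::Node> ast = parser.parse();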
//bool try_reduce(SqlToken token)
//{
//    // what state are we in? what are we expecting?
//}
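
// A hedged sketch of what try_reduce could look like, building on
// matchesRuleTop above. The Rule struct and the rule table are assumptions
// for illustration; the real reduction rules are not defined in this file.
namespace {

struct Rule {
    std::vector<int> rhs; // token ids that must sit on top of the stack
    int lhs;              // token id pushed in their place after reduction
};

bool tryReduceSketch(std::vector<StackItem> &stack,
                     const std::vector<Rule> &rules)
{
    for (const Rule &rule : rules) {
        if (matchesRuleTop(stack, rule.rhs)) {
            // Pop the matched symbols and push the reduction result.
            stack.resize(stack.size() - rule.rhs.size());
            stack.push_back(StackItem{rule.lhs});
            return true;
        }
    }
    return false;
}

} // namespace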