Extra lines before and after the query are removed, and trailing whitespace is stripped from each line. SQL comments are converted to C++-style comments and placed outside the string literal. To achieve this, the function now uses the SqlLexer to identify comments, which required adding the capability for the lexer to also return whitespace and newline tokens. A few bugs in the lexer were fixed as well.
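For context, here is a minimal sketch of how the comment extraction described above could be driven by the SqlLexer API that the tests below exercise. The helper name extractComments, the BasicTokenType::Comment enumerator, and the "--" line-comment form are assumptions for illustration; only the constructor, nextBasicToken, and the token types shown in the tests are taken from the source.

// Sketch only, not the actual implementation: walk a query with SqlLexer
// (whitespace tokens enabled via the third constructor argument) and collect
// SQL comments so they can be re-emitted as C++-style comments outside the
// string literal. BasicTokenType::Comment is an assumed enumerator.
#include <QString>
#include <QStringList>
#include "SqlLexer.h"

QStringList extractComments(const QString& query)      // hypothetical helper
{
    QStringList comments;
    SqlLexer lexer(query, LexerState::Null, true);      // true: also return whitespace/newline tokens

    int startpos, length;
    BasicTokenType tokentype;
    QString out;

    for (;;) {
        lexer.nextBasicToken(startpos, length, tokentype, out);
        if (tokentype == BasicTokenType::End)
            break;
        if (tokentype == BasicTokenType::Comment) {     // assumed token type
            QString text = out.trimmed();
            if (text.startsWith("--"))                  // assumed "--" SQL line comment
                text = "//" + text.mid(2);
            comments << text;
        }
    }
    return comments;
}

The constructor arguments, nextBasicToken, and BasicTokenType::End used above match the calls in the test file below.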
#include <gtest/gtest.h>
#include <gmock/gmock-matchers.h>
#include "SqlLexer.h"
#include "PrintTo_Qt.h"

using namespace testing;

TEST(SqlLexer, emptyInput)
{
    QString input;
    SqlLexer lexer(input, LexerState::Null);

    int startpos, length;
    BasicTokenType tokentype;
    QString out;
    lexer.nextBasicToken(startpos, length, tokentype, out);

    ASSERT_THAT(startpos, Eq(0));
    ASSERT_THAT(tokentype, Eq(BasicTokenType::End));
}

TEST(SqlLexer, lexer)
{
    QString input = " SELECT ";
    SqlLexer lexer(input, LexerState::Null);

    int startpos, length;
    BasicTokenType tokentype;
    QString out;
    lexer.nextBasicToken(startpos, length, tokentype, out);

    ASSERT_THAT(startpos, Eq(1));
    ASSERT_THAT(length, Eq(6));
    ASSERT_THAT(tokentype, Eq(BasicTokenType::Symbol));
    ASSERT_THAT(out, Eq(QString("SELECT")));
}

TEST(SqlLexer, lexerWithWhiteSpace)
{
    QString input = " SELECT ";
    SqlLexer lexer(input, LexerState::Null, true);

    int startpos, length;
    BasicTokenType tokentype;
    QString out;
    lexer.nextBasicToken(startpos, length, tokentype, out);
    ASSERT_THAT(startpos, Eq(0));
    ASSERT_THAT(length, Eq(1));
    ASSERT_THAT(tokentype, Eq(BasicTokenType::WhiteSpace));
    ASSERT_THAT(out, Eq(QString(" ")));

    lexer.nextBasicToken(startpos, length, tokentype, out);
    ASSERT_THAT(startpos, Eq(1));
    ASSERT_THAT(length, Eq(6));
    ASSERT_THAT(tokentype, Eq(BasicTokenType::Symbol));
    ASSERT_THAT(out, Eq(QString("SELECT")));
}

TEST(SqlLexer, lexer_quote_in_string)
{
    QString input = " 'abc''def' ";
    SqlLexer lexer(input, LexerState::Null);

    int startpos, length;
    BasicTokenType tokentype;
    QString out;
    lexer.nextBasicToken(startpos, length, tokentype, out);

    ASSERT_THAT(startpos, Eq(1));
    ASSERT_THAT(length, Eq(10));
    ASSERT_THAT(tokentype, Eq(BasicTokenType::QuotedString));
    ASSERT_THAT(out, Eq(QString("'abc''def'")));
}

TEST(SqlLexer, lexer_comma_handling)
{
    QString input = "abc,def";
    SqlLexer lexer(input, LexerState::Null);

    int startpos, length;
    BasicTokenType tokentype;
    QString out;

    lexer.nextBasicToken(startpos, length, tokentype, out);
    ASSERT_THAT(startpos, Eq(0));
    ASSERT_THAT(length, Eq(3));
    ASSERT_THAT(tokentype, Eq(BasicTokenType::Symbol));

    lexer.nextBasicToken(startpos, length, tokentype, out);
    ASSERT_THAT(startpos, Eq(3));
    ASSERT_THAT(length, Eq(1));
    ASSERT_THAT(tokentype, Eq(BasicTokenType::Comma));
    ASSERT_THAT(out, Eq(QString(",")));

    lexer.nextBasicToken(startpos, length, tokentype, out);
    ASSERT_THAT(startpos, Eq(4));
    ASSERT_THAT(length, Eq(3));
    ASSERT_THAT(tokentype, Eq(BasicTokenType::Symbol));
}

TEST(SqlLexer, lexer_cast)
{
    QString input = "'1'::integer";
    SqlLexer lexer(input, LexerState::Null);

    int startpos, length;
    BasicTokenType tokentype;
    QString out;
    lexer.nextBasicToken(startpos, length, tokentype, out);

    ASSERT_THAT(startpos, Eq(0));
    ASSERT_THAT(length, Eq(3));
    ASSERT_THAT(tokentype, Eq(BasicTokenType::QuotedString));

    lexer.nextBasicToken(startpos, length, tokentype, out);

    ASSERT_THAT(startpos, Eq(3));
    ASSERT_THAT(length, Eq(2));
    ASSERT_THAT(tokentype, Eq(BasicTokenType::Cast));

    lexer.nextBasicToken(startpos, length, tokentype, out);

    ASSERT_THAT(startpos, Eq(5));
    ASSERT_THAT(length, Eq(7));
    ASSERT_THAT(tokentype, Eq(BasicTokenType::Symbol));
}