Add query parser class

Authored by Iain Benson on 2018-11-18 21:25:36 +00:00; committed by Luis Ángel San Martín
parent d3de52ca82
commit 4a50d438d0
3 changed files with 305 additions and 3 deletions

View File

@@ -67,8 +67,8 @@ macx {
     QT += macextras gui-private
 }
 
-unix:!macx {
-    CONFIG += c++11
+unix {
+    CONFIG += c++1z
 }
 
 #CONFIG += release
@@ -147,6 +147,7 @@ HEADERS += comic_flow.h \
     yacreader_comics_selection_helper.h \
     yacreader_comic_info_helper.h \
     db/reading_list.h \
+    db/query_parser.h \
     current_comic_view_helper.h \
     lexertl/parser/tokeniser/re_token.hpp \
     lexertl/parser/tokeniser/re_tokeniser.hpp \
@@ -254,7 +255,8 @@ SOURCES += comic_flow.cpp \
     yacreader_comics_selection_helper.cpp \
     yacreader_comic_info_helper.cpp\
     db/reading_list.cpp \
-    current_comic_view_helper.cpp
+    current_comic_view_helper.cpp \
+    db/query_parser.cpp
 
 !CONFIG(no_opengl) {
     SOURCES += ../common/gl/yacreader_flow_gl.cpp

View File db/query_parser.cpp

@@ -0,0 +1,237 @@
#include "query_parser.h"
#include <QVariant>
#include <sstream>
#include <type_traits>
#include <numeric>
const std::map<QueryParser::FieldType, std::vector<std::string>> QueryParser::fieldNames {
{FieldType::numeric, {"numpages", "number", "count", "arcnumber", "arccount"}},
{FieldType::text, {"title", "volume", "storyarc", "genere", "writer", "penciller", "inker", "colorist", "letterer",
"coverartist", "publisher", "format", "agerating", "synopsis", "characters", "notes"}},
{FieldType::boolean, {"isbis", "color"}},
{FieldType::date, {"date"} } };
// Recursively appends a parenthesised SQL condition for this subtree to sqlString.
// Each "token" leaf consumes one :bindPositionN placeholder; the final position is returned
// so that bindValues() can later bind the same placeholders in the same order.
int QueryParser::TreeNode::buildSqlString(std::string &sqlString, int bindPosition) const
{
    if (t == "token") {
        ++bindPosition;
        std::ostringstream oss;
        if (children[0].t == "all") {
            oss << "(";
            for (const auto &field : fieldNames.at(FieldType::text)) {
                oss << "UPPER(ci." << field << ") LIKE UPPER(:bindPosition" << bindPosition << ") OR ";
            }
            oss << "UPPER(c.fileName) LIKE UPPER(:bindPosition" << bindPosition << ")) ";
        } else if (isIn(fieldType(children[0].t), FieldType::numeric, FieldType::boolean)) {
            oss << "ci." << children[0].t << " = :bindPosition" << bindPosition << " ";
        } else {
            oss << "(UPPER(ci." << children[0].t << ") LIKE UPPER(:bindPosition" << bindPosition << ")) ";
        }
        sqlString += oss.str();
    } else if (t == "not") {
        sqlString += "(NOT ";
        bindPosition = children[0].buildSqlString(sqlString, bindPosition);
        sqlString += ")";
    } else {
        sqlString += "(";
        bindPosition = children[0].buildSqlString(sqlString, bindPosition);
        sqlString += " " + t + " ";
        bindPosition = children[1].buildSqlString(sqlString, bindPosition);
        sqlString += ")";
    }
    return bindPosition;
}
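// Example (illustrative): for the tree produced from the query "writer:moore",
// buildSqlString() appends
//     (UPPER(ci.writer) LIKE UPPER(:bindPosition1))
// and returns 1, so bindValues() will later bind exactly one value.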
// Binds one value per "token" leaf, visiting the tree in the same order as buildSqlString()
// so that the :bindPositionN placeholders line up.
int QueryParser::TreeNode::bindValues(QSqlQuery &selectQuery, int bindPosition) const
{
    if (t == "token") {
        std::ostringstream oss;
        oss << ":bindPosition" << ++bindPosition;
        if (isIn(fieldType(children[0].t), FieldType::numeric, FieldType::boolean)) {
            selectQuery.bindValue(oss.str().c_str(), std::stoi(children[1].t));
        } else {
            // Text fields are matched with LIKE, so surround the search term with wildcards.
            selectQuery.bindValue(oss.str().c_str(), ("%%" + children[1].t + "%%").c_str());
        }
    } else if (t == "not") {
        bindPosition = children[0].bindValues(selectQuery, bindPosition);
    } else {
        bindPosition = children[0].bindValues(selectQuery, bindPosition);
        bindPosition = children[1].bindValues(selectQuery, bindPosition);
    }
    return bindPosition;
}
QueryParser::QueryParser()
    : lexScanner(0)
{
    // Lexer rules; the rule id is the underlying value of the corresponding TokenType.
    lexScanner.push("[()]", static_cast<std::underlying_type<TokenType>::type>(TokenType::opcode));
    lexScanner.push("@[^:]+:[^\\\")\\s]+", static_cast<std::underlying_type<TokenType>::type>(TokenType::atWord));
    lexScanner.push("[^\\\"()\\s]+", static_cast<std::underlying_type<TokenType>::type>(TokenType::word));
    lexScanner.push("\\\".*?\\\"", static_cast<std::underlying_type<TokenType>::type>(TokenType::quotedWord));
    lexScanner.push("\\s+", static_cast<std::underlying_type<TokenType>::type>(TokenType::space));
    lexertl::generator::build(lexScanner, sm);
}
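// Example: the query
//     writer:moore and not (batman or "dark knight")
// is scanned as
//     word("writer:moore") word("and") word("not") opcode("(") word("batman")
//     word("or") quotedWord("\"dark knight\"") opcode(")")
// with the whitespace runs reported as space tokens, which advance() skips.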
QueryParser::TreeNode QueryParser::parse(const std::string &expr)
{
    tokenize(expr);
    auto prog = orExpression();
    if (!isEof()) {
        throw std::invalid_argument("Extra characters at end of search");
    }
    return prog;
}

std::string QueryParser::toLower(const std::string &string)
{
    std::string res(string);
    std::transform(res.begin(), res.end(), res.begin(), ::tolower);
    return res;
}
std::string QueryParser::token(bool advance)
{
    if (isEof()) {
        return "";
    }
    // For quoted words, strip the surrounding quotes from the match.
    auto res = (tokenType() == TokenType::quotedWord) ? iter->substr(1, 1) : iter->str();
    if (advance) {
        this->advance();
    }
    return res;
}

std::string QueryParser::lcaseToken(bool advance)
{
    if (isEof()) {
        return "";
    }
    auto res = (tokenType() == TokenType::quotedWord) ? iter->substr(1, 1) : iter->str();
    if (advance) {
        this->advance();
    }
    return toLower(res);
}

QueryParser::TokenType QueryParser::tokenType()
{
    if (isEof()) {
        return TokenType::eof;
    }
    return TokenType(iter->id);
}

bool QueryParser::isEof() const
{
    return iter == end;
}

void QueryParser::advance()
{
    ++iter;
    // Whitespace is tokenized but never meaningful, so skip over space tokens.
    if (tokenType() == TokenType::space)
        advance();
}
QueryParser::FieldType QueryParser::fieldType(const std::string &str)
{
    for (const auto &names : fieldNames) {
        if (std::find(names.second.begin(), names.second.end(), toLower(str)) != names.second.end()) {
            return names.first;
        }
    }
    return FieldType::unknown;
}

void QueryParser::tokenize(const std::string &expr)
{
    // TODO: Strip out escaped backslashes, quotes and parens so that the
    // lex scanner doesn't get confused. We put them back later.
    iter = lexertl::siterator(expr.begin(), expr.end(), sm);
    /* for (; !isEof(); advance()) {
        std::cout << "Id: " << iter->id << ", Token: '" << token() << "'\n";
    }
    iter = lexertl::siterator(expr.begin(), expr.end(), sm);
    */
}
std::string QueryParser::join(const std::vector<std::string> &strings, const std::string &delim)
{
    return std::accumulate(strings.begin(), strings.end(), std::string(),
                           [&delim](const std::string &a, const std::string &b) -> std::string {
                               return a + (a.length() > 0 && b.length() > 0 ? delim : "") + b;
                           });
}

std::vector<std::string> QueryParser::split(const std::string &string, char delim)
{
    std::istringstream iss(string);
    std::vector<std::string> words;
    // The final failed getline() pushes one trailing empty string; join() drops empty
    // strings, so the callers in baseToken() are unaffected.
    while (iss) {
        std::string substr;
        std::getline(iss, substr, delim);
        words.push_back(substr);
    }
    return words;
}
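// Informal grammar implemented by the recursive-descent functions below
// (binding strength: or < and < not; adjacent terms imply "and"):
//
//   orExpression       := andExpression [ "or" orExpression ]
//   andExpression      := notExpression [ ["and"] andExpression ]
//   notExpression      := "not" notExpression | locationExpression
//   locationExpression := "(" orExpression ")" | baseToken
//   baseToken          := quotedWord | [ field ":" ] word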
QueryParser::TreeNode QueryParser::orExpression()
{
    auto lhs = andExpression();
    if (lcaseToken() == "or") {
        advance();
        return {"or", {lhs, orExpression()}};
    }
    return lhs;
}

QueryParser::TreeNode QueryParser::andExpression()
{
    auto lhs = notExpression();
    if (lcaseToken() == "and") {
        advance();
        return {"and", {lhs, andExpression()}};
    }
    // Two adjacent terms without an operator ("batman superman") are treated as an implicit "and".
    if ((isIn(tokenType(), TokenType::atWord, TokenType::word, TokenType::quotedWord) || token() == "(") && lcaseToken() != "or") {
        return {"and", {lhs, andExpression()}};
    }
    return lhs;
}

QueryParser::TreeNode QueryParser::notExpression()
{
    if (lcaseToken() == "not") {
        advance();
        return {"not", {notExpression()}};
    }
    return locationExpression();
}

QueryParser::TreeNode QueryParser::locationExpression()
{
    if (tokenType() == TokenType::opcode && token() == "(") {
        advance();
        auto res = orExpression();
        if (tokenType() != TokenType::opcode || token(true) != ")") {
            throw std::invalid_argument("missing ')'");
        }
        return res;
    }
    if (!isIn(tokenType(), TokenType::atWord, TokenType::word, TokenType::quotedWord)) {
        throw std::invalid_argument("Invalid syntax. Expected a lookup name or a word");
    }
    return baseToken();
}
QueryParser::TreeNode QueryParser::baseToken()
{
    if (tokenType() == TokenType::quotedWord) {
        return {"token", {{"all", {}}, {token(true), {}}}};
    }
    auto words(split(token(true), ':'));
    // A known field name followed by ':' restricts the search to that field; otherwise
    // the whole token is searched across all text fields.
    if (words.size() > 1 && fieldType(words[0]) != FieldType::unknown) {
        auto loc(toLower(words[0]));
        words.erase(words.begin());
        if (words.size() == 1 && tokenType() == TokenType::quotedWord) {
            return {"token", {{loc, {}}, {token(true), {}}}};
        }
        return {"token", {{loc, {}}, {join(words, ":"), {}}}};
    }
    return {"token", {{"all", {}}, {join(words, ":"), {}}}};
}
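// Example (illustrative): parse("writer:moore and not batman") produces
//   {"and", { {"token", {{"writer"}, {"moore"}}},
//             {"not", { {"token", {{"all"}, {"batman"}}} }} }}
// and buildSqlString() renders it roughly as
//   ((UPPER(ci.writer) LIKE UPPER(:bindPosition1)) and
//    (NOT (UPPER(ci.title) LIKE UPPER(:bindPosition2) OR ... OR UPPER(c.fileName) LIKE UPPER(:bindPosition2))))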

View File db/query_parser.h

@@ -0,0 +1,63 @@
#ifndef QUERY_PARSER_H
#define QUERY_PARSER_H

#include "lexertl/generator.hpp"
#include "lexertl/iterator.hpp"

#include <QSqlQuery>

#include <map>
#include <string>
#include <vector>

class QSqlQuery;

// Parses a textual search expression (e.g. "writer:moore and not batman") into a tree of
// TreeNodes that can emit a parameterised SQL WHERE clause and bind the matching values.
class QueryParser
{
public:
    enum class TokenType { eof, opcode, atWord, word, quotedWord, space };

    struct TreeNode {
        std::string t;
        std::vector<TreeNode> children;

        // Appends the SQL condition for this subtree to sqlString and returns the last bind position used.
        int buildSqlString(std::string &sqlString, int bindPosition = 0) const;
        // Binds one value per token leaf onto selectQuery, in the same order as buildSqlString().
        int bindValues(QSqlQuery &selectQuery, int bindPosition = 0) const;
    };

    explicit QueryParser();

    // Throws std::invalid_argument on malformed input.
    TreeNode parse(const std::string &expr);

private:
    static std::string toLower(const std::string &string);

    std::string token(bool advance = false);
    std::string lcaseToken(bool advance = false);
    TokenType tokenType();
    bool isEof() const;
    void advance();

    template<typename First, typename... T>
    static bool isIn(First &&first, T &&...t) { return ((first == t) || ...); }

    enum class FieldType { unknown, numeric, text, boolean, date };
    static FieldType fieldType(const std::string &str);

    void tokenize(const std::string &expr);

    static std::string join(const std::vector<std::string> &strings, const std::string &delim);
    static std::vector<std::string> split(const std::string &string, char delim);

    // Recursive-descent parser, one function per precedence level.
    TreeNode orExpression();
    TreeNode andExpression();
    TreeNode notExpression();
    TreeNode locationExpression();
    TreeNode baseToken();

    lexertl::rules lexScanner;
    lexertl::state_machine sm;
    lexertl::siterator iter;
    const lexertl::siterator end;

    static const std::map<FieldType, std::vector<std::string>> fieldNames;
};

#endif // QUERY_PARSER_H
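
A minimal usage sketch of the class above, assuming an open QSqlDatabase and a schema in which the comic table is aliased as c and the comic_info table as ci (those are the aliases buildSqlString() emits); the table and join column names below are illustrative assumptions:

#include <QSqlDatabase>
#include <QSqlQuery>
#include <QString>

#include <string>

#include "db/query_parser.h"

// Builds a prepared, bound query for a user-supplied search expression.
QSqlQuery buildSearchQuery(QSqlDatabase &db, const std::string &userInput)
{
    QueryParser parser;
    QueryParser::TreeNode root = parser.parse(userInput); // throws std::invalid_argument on bad input

    std::string where;
    root.buildSqlString(where); // emits :bindPositionN placeholders

    QSqlQuery query(db);
    query.prepare(QString::fromStdString(
            "SELECT c.* FROM comic c INNER JOIN comic_info ci ON (c.comicInfoId = ci.id) WHERE " + where));
    root.bindValues(query); // binds one value per token, in the same order

    return query; // caller runs query.exec()
}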