diff --git a/backend/src/app.ts b/backend/src/app.ts
index 6937875..f6bdcc8 100644
--- a/backend/src/app.ts
+++ b/backend/src/app.ts
@@ -2,10 +2,12 @@ import express from 'express'
 import { json } from 'body-parser'
 import { Sequelize, Op } from 'sequelize'
 import { database, Character, Game, Pick, App } from './db'
-import { lexr, parseOrderByString, FilterParser } from './tokenizer'
+import { OrderByParser, FilterParser } from './tokenizer'
 
 const app = express()
 const jsonParser = json()
+const fp = new FilterParser()
+const obp = new OrderByParser()
 const port = 3001
 
 app.get('/', (req, res) => {
@@ -36,34 +38,20 @@ app.get('/api/game/:gameId', async (req, res) => {
 })
 
 app.post('/api/game', jsonParser, async (req, res) => {
-  console.log(req.body)
   const page = req.body.page || 0
-  const orderBy = req.body.orderBy ? parseOrderByString(req.body.orderBy) : ['id']
+  const orderBy = req.body.orderBy ? obp.parse(req.body.orderBy) : ['id']
   const count = req.body.count || 10
-  const filter = req.body.filter || ''
+  const filter = req.body.filter ? fp.parse(req.body.filter) : {}
 
   const gameData = await Game.findAll({
     offset: page * count,
     limit: count,
     order: orderBy,
-    where: {
-      id: { [Op.eq]: 2 }
-    }
+    where: filter
   })
 
   const pageCount = Math.ceil((await Character.count()) / count)
 
-  let fp = new FilterParser()
-
-  fp.lexer
-    .input(filter)
-    .tokens()
-    .forEach((t) => {
-      console.log(t)
-    })
-
-  console.log(fp.parseFilter(filter))
-
   res.setHeader('Content-Type', 'application/json')
   res.send({ gameData, pageCount })
 })
diff --git a/backend/src/tokenizer.ts b/backend/src/tokenizer.ts
index 4e12c31..6e01ab2 100644
--- a/backend/src/tokenizer.ts
+++ b/backend/src/tokenizer.ts
@@ -1,35 +1,31 @@
 import Tokenizr from 'tokenizr'
-import ASTY from 'asty'
 import { Op } from 'sequelize'
 
-export const lexr = new Tokenizr()
+export class OrderByParser {
+  lexer = new Tokenizr()
 
-lexr.rule(/[AND|OR]/, (ctx, m) => {
-  ctx.accept('conjunction', m[0])
-})
+  constructor() {
+    this.lexer.rule(/,/, (ctx, m) => {
+      ctx.accept('spacer')
+    })
 
-export const orderByLexr = new Tokenizr()
+    this.lexer.rule(/ASC|DESC/, (ctx, m) => {
+      ctx.accept('direction', m[0])
+    })
 
-orderByLexr.rule(/,/, (ctx, m) => {
-  ctx.accept('spacer')
-})
+    this.lexer.rule(/[a-zA-Z]+/, (ctx, m) => {
+      ctx.accept('column', m[0])
+    })
 
-orderByLexr.rule(/ASC|DESC/, (ctx, m) => {
-  ctx.accept('direction', m[0])
-})
+    this.lexer.rule(/\s/, (ctx, m) => {
+      ctx.ignore()
+    })
+  }
 
-orderByLexr.rule(/[a-zA-Z]+/, (ctx, m) => {
-  ctx.accept('column', m[0])
-})
-
-orderByLexr.rule(/\s/, (ctx, m) => {
-  ctx.ignore()
-})
-
-export function parseOrderByString(orderBy: string) {
+  parse(orderBy: string) {
     const output = []
     let holding = []
-    orderByLexr
+    this.lexer
       .input(orderBy)
       .tokens()
       .forEach((token) => {
@@ -48,6 +44,7 @@
       output.push(holding)
     }
     return output
+  }
 }
 
 const openGroupRegex = /\(/
@@ -61,55 +58,47 @@ const opperatorMap = {
   '!=': Op.ne,
   '<=': Op.lte,
   '>=': Op.gte,
-  ':': Op.like
+  '>': Op.gt,
+  '<': Op.lt,
+  ':': Op.like,
+  AND: Op.and,
+  OR: Op.or
 }
 
 export class FilterParser {
-  asty: ASTY = new ASTY()
-  lexer: Tokenizr = this.createLexer()
+  lexer: Tokenizr
 
-  constructor() {}
+  constructor() {
+    this.lexer = new Tokenizr()
 
-  private createLexer(): Tokenizr {
-    const tkz = new Tokenizr()
-
-    tkz.rule(openGroupRegex, (ctx, m) => {
+    this.lexer.rule(openGroupRegex, (ctx, m) => {
       ctx.accept('opengroup')
     })
 
-    tkz.rule(closeGroupRegex, (ctx, m) => {
+    this.lexer.rule(closeGroupRegex, (ctx, m) => {
       ctx.accept('closegroup')
     })
 
-    tkz.rule(conjunctinoRegex, (ctx, m) => {
+    this.lexer.rule(conjunctinoRegex, (ctx, m) => {
       ctx.accept('conjunction', m[0])
    })
 
-    tkz.rule(equalityRegex, (ctx, m) => {
+    this.lexer.rule(equalityRegex, (ctx, m) => {
       ctx.accept('column', m[1])
       ctx.accept('opperator', m[2])
       ctx.accept('value', m[3])
    })
 
-    tkz.rule(spacerRegex, (ctx, m) => {
+    this.lexer.rule(spacerRegex, (ctx, m) => {
       ctx.ignore()
    })
-
-    return tkz
   }
 
-  parseFilter(filter: string) {
+  parse(filter: string) {
     this.lexer.input(filter)
-    console.log(`parsing ${filter}`)
-    this.lexer.begin()
-    this.lexer.tokens().forEach((token) => {
-      console.log(token)
-    })
-    this.lexer.rollback()
 
     let block = this.parseBlock('AND')
     this.lexer.consume('EOF')
     this.lexer.reset()
-
     return block
   }
@@ -122,12 +111,8 @@
       () => this.parseEquality(),
       () => this.parseGroup(),
       () => {
-
-        console.log("Conjunct")
-
        let conToken = this.lexer.consume('conjunction')
-        console.log("potato")
        if (items.length === 1) {
          activeCon = conToken.value
        }
 
@@ -137,42 +122,36 @@
        } else {
          return this.parseBlock(conToken.value)
        }
-      }, () => {}
+      },
+      () => this.parseEmpty()
     )
 
     if (nextItem === undefined) {
-      console.log("breaking")
       break
     }
 
     items.push(nextItem)
   }
 
-    return this.asty.create('conjunction').set('type', activeCon).add(items)
+    return { [opperatorMap[activeCon]]: items }
   }
 
-  private parseConjunction() {}
-
   private parseEquality() {
-    console.log("doing an equality")
-
     let columnToken = this.lexer.consume('column')
     let opperatorToken = this.lexer.consume('opperator')
     let valueToken = this.lexer.consume('value')
-    let node = this.asty.create('equality')
+
+    if (opperatorToken.value === ":") {
-      node.pos(columnToken.line, columnToken.column, columnToken.pos)
+      return {
+        [columnToken.value]: { [opperatorMap[opperatorToken.value]]: `%${valueToken.value.toString()}%` }
+      }
+    } else {
-    node.set(columnToken.type, columnToken.value)
-    node.set(opperatorToken.type, opperatorToken.value)
-    node.set(valueToken.type, valueToken.value)
-    node.set('expression', {
-      [columnToken.value]: {
-        [opperatorMap[opperatorToken.value]]: valueToken.value
-      }
-    })
-
-    return node
+      return {
+        [columnToken.value]: { [opperatorMap[opperatorToken.value]]: valueToken.value.toString() }
+      }
+    }
   }
 
   private parseGroup() {
     this.lexer.consume('opengroup')
@@ -182,4 +161,8 @@
 
     return block
   }
+
+  private parseEmpty() {
+    return undefined
+  }
 }