diff --git a/backend/package-lock.json b/backend/package-lock.json
index 68ab0e6..6e87e30 100644
--- a/backend/package-lock.json
+++ b/backend/package-lock.json
@@ -10,6 +10,7 @@
       "license": "ISC",
       "dependencies": {
         "@types/body-parser": "^1.19.5",
+        "asty": "^1.8.21",
         "body-parser": "^1.20.2",
         "express": "^4.18.3",
         "sequelize": "^6.37.1",
@@ -901,6 +902,14 @@
         "node": ">=8"
       }
     },
+    "node_modules/asty": {
+      "version": "1.8.21",
+      "resolved": "https://registry.npmjs.org/asty/-/asty-1.8.21.tgz",
+      "integrity": "sha512-LxWJZ1WuxaGYXL7Q8Kx4BoHFGJvTkkQKow6Dy4gcJ3UiKaKdhZkqrBOwPybPR9JiUOzCeOhUdu6/xz4WBTU7OQ==",
+      "engines": {
+        "node": ">=18.0.0"
+      }
+    },
     "node_modules/balanced-match": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
diff --git a/backend/package.json b/backend/package.json
index 8514744..40f98ca 100644
--- a/backend/package.json
+++ b/backend/package.json
@@ -24,6 +24,7 @@
   },
   "dependencies": {
     "@types/body-parser": "^1.19.5",
+    "asty": "^1.8.21",
     "body-parser": "^1.20.2",
     "express": "^4.18.3",
     "sequelize": "^6.37.1",
diff --git a/backend/src/app.ts b/backend/src/app.ts
index 6f073bb..38994f4 100644
--- a/backend/src/app.ts
+++ b/backend/src/app.ts
@@ -2,7 +2,7 @@ import express from 'express'
 import { json } from 'body-parser'
 import { Sequelize } from 'sequelize'
 import { database, Character, Game, Pick, App } from './db'
-import { lexr, orderByLexr, parseOrderByString } from './tokenizer'
+import { lexr, parseOrderByString, FilterParser } from './tokenizer'
 
 const app = express()
 const jsonParser = json()
@@ -50,6 +50,15 @@ app.post('/api/game', jsonParser, async (req, res) => {
   })
 
   const pageCount = Math.ceil((await Character.count()) / count)
+  const fp = new FilterParser()
+
+  fp.lexer
+    .input(filter)
+    .tokens()
+    .forEach((t) => {
+      console.log(t)
+    })
+
   res.setHeader('Content-Type', 'application/json')
   res.send({ gameData, pageCount })
 })
diff --git a/backend/src/tokenizer.ts
b/backend/src/tokenizer.ts
index c5ce390..4ca7e9d 100644
--- a/backend/src/tokenizer.ts
+++ b/backend/src/tokenizer.ts
@@ -1,4 +1,5 @@
 import Tokenizr from 'tokenizr'
+import ASTY from 'asty'
 
 export const lexr = new Tokenizr()
 
@@ -47,3 +48,49 @@ export function parseOrderByString(orderBy: string) {
   }
   return output
 }
+
+const openGroupRegex = /\(/
+const closeGroupRegex = /\)/
+const conjunctionRegex = /AND|OR/
+const statementRegex = /([a-zA-Z]+)\s?(=|!=|<|>|<=|>=|:)\s?([a-zA-Z\d"']+)/
+const spacerRegex = /\s/
+
+export class FilterParser {
+
+  lexer: Tokenizr = this.createLexer()
+
+  constructor() {}
+
+  private createLexer(): Tokenizr {
+    const tkz = new Tokenizr()
+
+    tkz.rule(openGroupRegex, (ctx) => {
+      ctx.accept('opengroup')
+    })
+
+    tkz.rule(closeGroupRegex, (ctx) => {
+      ctx.accept('closegroup')
+    })
+
+    tkz.rule(conjunctionRegex, (ctx, m) => {
+      ctx.accept('conjunction', m[0])
+    })
+
+    tkz.rule(statementRegex, (ctx, m) => {
+      ctx.accept('column', m[1])
+      ctx.accept('operator', m[2])
+      ctx.accept('value', m[3])
+    })
+
+    tkz.rule(spacerRegex, (ctx) => {
+      ctx.ignore()
+    })
+
+    return tkz
+  }
+
+  buildAST(): ASTY {
+    return new ASTY()
+  }
+
+}