Catch missed additions for DB separation; add tokenizers

This commit is contained in:
jmosrael@gmail.com
2024-05-16 14:48:29 -07:00
parent 5f580e172c
commit 8fa06736b4
6 changed files with 142 additions and 7 deletions

49
backend/src/tokenizer.ts Normal file
View File

@@ -0,0 +1,49 @@
import Tokenizr from 'tokenizr'
// Lexer for WHERE-clause-style filter strings: emits the logical
// conjunction keywords "AND" / "OR" as `conjunction` tokens.
export const lexr = new Tokenizr()
// BUG FIX: the original pattern /[AND|OR]/ was a character class that
// matched any SINGLE character among A, N, D, |, O, R — never the words
// themselves. /AND|OR/ matches the literal keywords as intended.
lexr.rule(/AND|OR/, (ctx, m) => {
  ctx.accept('conjunction', m[0])
})
// Lexer for ORDER BY clause strings.
// NOTE: rule order matters — the direction keywords (ASC/DESC) must be
// registered before the generic column rule, or they would be consumed
// as column names.
export const orderByLexr = new Tokenizr()

// a comma separates one column/direction group from the next
orderByLexr.rule(/,/, (context, _match) => {
  context.accept('spacer')
})

// sort-direction keywords
orderByLexr.rule(/ASC|DESC/, (context, match) => {
  context.accept('direction', match[0])
})

// bare column identifier (ASCII letters only)
orderByLexr.rule(/[a-zA-Z]+/, (context, match) => {
  context.accept('column', match[0])
})

// whitespace carries no meaning; drop it
orderByLexr.rule(/\s/, (context, _match) => {
  context.ignore()
})
/**
 * Split an ORDER BY clause string into per-column token groups.
 *
 * Example: `"name ASC, age DESC"` → `[['name', 'ASC'], ['age', 'DESC']]`
 *
 * @param orderBy raw ORDER BY clause text (without the "ORDER BY" keywords)
 * @returns one array per comma-separated segment, each containing the
 *          column name followed, when present, by its direction keyword
 */
export function parseOrderByString(orderBy: string): string[][] {
  const output: string[][] = []
  let holding: string[] = []
  orderByLexr
    .input(orderBy)
    .tokens()
    .forEach((token) => {
      switch (token.type) {
        case 'spacer':
          // a comma closes out the current group
          output.push(holding)
          holding = []
          break
        case 'column':
        case 'direction':
          holding.push(token.value)
          break
      }
    })
  // BUG FIX: the original checked `if (holding)`, which is always true
  // (an empty array is truthy in JS), so empty input or a trailing comma
  // produced a spurious empty group. Only flush a non-empty group.
  if (holding.length > 0) {
    output.push(holding)
  }
  return output
}