Commit message: catch missed additions for db separation; add tokenizers.
New file added: backend/src/tokenizer.ts (49 lines, diff hunk @@ -0,0 +1,49 @@).
|
||||
import Tokenizr from 'tokenizr'
|
||||
|
||||
export const lexr = new Tokenizr()
|
||||
|
||||
lexr.rule(/[AND|OR]/, (ctx, m) => {
|
||||
ctx.accept('conjunction', m[0])
|
||||
})
|
||||
|
||||
export const orderByLexr = new Tokenizr()
|
||||
|
||||
orderByLexr.rule(/,/, (ctx, m) => {
|
||||
ctx.accept('spacer')
|
||||
})
|
||||
|
||||
orderByLexr.rule(/ASC|DESC/, (ctx, m) => {
|
||||
ctx.accept('direction', m[0])
|
||||
})
|
||||
|
||||
orderByLexr.rule(/[a-zA-Z]+/, (ctx, m) => {
|
||||
ctx.accept('column', m[0])
|
||||
})
|
||||
|
||||
orderByLexr.rule(/\s/, (ctx, m) => {
|
||||
ctx.ignore()
|
||||
})
|
||||
|
||||
export function parseOrderByString(orderBy: string) {
|
||||
const output = []
|
||||
let holding = []
|
||||
orderByLexr
|
||||
.input(orderBy)
|
||||
.tokens()
|
||||
.forEach((token) => {
|
||||
switch (token.type) {
|
||||
case 'spacer':
|
||||
output.push(holding)
|
||||
holding = []
|
||||
break
|
||||
case 'column':
|
||||
case 'direction':
|
||||
holding.push(token.value)
|
||||
break
|
||||
}
|
||||
})
|
||||
if (holding) {
|
||||
output.push(holding)
|
||||
}
|
||||
return output
|
||||
}
|
||||
Reference in New Issue
Block a user