Catch missed additions for DB separation; add in tokenizers

This commit is contained in:
jmosrael@gmail.com
2024-05-16 14:48:29 -07:00
parent 5f580e172c
commit 8fa06736b4
6 changed files with 142 additions and 7 deletions

View File

@@ -1,6 +1,8 @@
import express from 'express'
import { json } from 'body-parser'
import { Sequelize } from 'sequelize'
import { database, Character, Game, Pick, App } from './db'
import { lexr, orderByLexr, parseOrderByString } from './tokenizer'
const app = express()
const jsonParser = json()
@@ -34,16 +36,17 @@ app.get('/api/game/:gameId', async (req, res) => {
})
app.post('/api/game', jsonParser, async (req, res) => {
console.log(req.body)
const page = req.body.page || 0
const orderBy = req.body.orderBy || 'id'
const orderBy = req.body.orderBy ? parseOrderByString(req.body.orderBy) : ['id']
const count = req.body.count || 10
const filter = req.body.filter || ''
console.log(filter)
const gameData = await Game.findAll({
offset: page * count,
limit: count
limit: count,
order: orderBy
})
const pageCount = Math.ceil((await Character.count()) / count)