prevent huge data amounts, separate sources

This commit is contained in:
2020-08-23 14:17:10 +02:00
parent 39f9f17b95
commit b6ecd65748
9 changed files with 213 additions and 102 deletions

View File

@ -11,15 +11,19 @@ const db = require ('../db');
const { http } = require ('@sapphirecode/consts');
module.exports = async (req, res) => {
if (
typeof req.headers.app_id === 'undefined'
|| isNaN (parseInt (req.headers.app_id))
) {
const limit = parseInt (req.headers.limit);
const offset = parseInt (req.headers.offset);
const app_id = parseInt (req.headers.app_id);
if (isNaN (app_id)) {
res.status (http.status_bad_request)
.end ();
}
else {
res.status (http.status_ok)
.json (await db.log.get_all (parseInt (req.headers.app_id)));
.end ('app id not specified');
}
res.status (http.status_ok)
.json (await db.log.get_all (
parseInt (req.headers.app_id),
isNaN (limit) ? 100 : limit,
isNaN (offset) ? 0 : offset
));
};

View File

@ -8,7 +8,7 @@
'use strict';
// Log data-access factory: takes a `get_db` getter that yields a knex
// connection. NOTE: this span is a rendered diff — old (removed) and new
// (added) lines are interleaved below and are annotated as such.
module.exports = (get_db) => ({
// (diff: OLD signature, removed by this commit)
get_all (app_id) {
// (diff: NEW signature) Fetch log rows for one app, paginated.
// `limit` defaults to 100 rows, `offset` to 0.
get_all (app_id, limit = 100, offset = 0) {
const knex = get_db ();
return knex.select (
'id',
// (diff hunk header — the columns between 'id' and 'timestamp', file
// lines 15-16, are not visible in this view)
@ -17,7 +17,10 @@ module.exports = (get_db) => ({
'timestamp'
)
.from ('log')
// (diff: OLD query tail, removed by this commit)
.where ({ app_id });
// (diff: NEW query tail)
.where ({ app_id })
// NOTE(review): presumably chronological (oldest-first) order is what
// callers expect — confirm against the API consumers.
.orderBy ('timestamp')
// Hard cap of 10000 rows to "prevent huge data amounts" (commit message).
// NOTE(review): a negative `limit` passes through Math.min unchecked.
.limit (Math.min (limit, 10000))
.offset (offset);
},
insert (app_id, message, data = '{}', timestamp = (new Date)) {
const knex = get_db ();