reduction job, chart duplicates
All checks were successful
continuous-integration/drone/push Build is passing
@@ -11,6 +11,7 @@
 const knex = require ('knex');
 const fs = require ('fs');
+const job = require ('./job');
 
 let db = null;
 
@@ -18,8 +19,10 @@ async function init (use_fake_seed) {
 	if (!fs.existsSync ('data'))
 		fs.mkdirSync ('data');
 
-	if (use_fake_seed && fs.existsSync ('data/db.sqlite'))
+	if (use_fake_seed && fs.existsSync ('data/db.sqlite')) {
 		fs.unlinkSync ('data/db.sqlite');
+		fs.copyFileSync ('data/test.sqlite', 'data/db.sqlite');
+	}
 
 	db = knex ({
 		client: 'sqlite',
@@ -30,10 +33,11 @@ async function init (use_fake_seed) {
 	});
 
 	await db.migrate.latest ();
-
-	await db.seed.run ({ specific: 'prod.js' });
-
-	// await db.seed.run ({ specific: 'fake.js' });
+	if (use_fake_seed)
+		await db.seed.run ({ specific: 'fake.js' });
+	else
+		await db.seed.run ({ specific: 'prod.js' });
+	await job (db);
 }
 
 function get_db () {
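For context, a sketch of how the seed switch in init might be driven; the entry point and the module's export shape are assumptions for illustration, not part of this commit:

    // hypothetical caller (assumes lib/db exports init; not part of this diff)
    const db = require ('./lib/db');

    (async () => {
    	// with use_fake_seed = true, init wipes data/db.sqlite, restores
    	// data/test.sqlite over it and seeds fake.js; otherwise it seeds
    	// prod.js. either way it finishes by running the reduction job once.
    	await db.init (process.env.NODE_ENV !== 'production');
    })();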
lib/db/job.js (new file, 85 additions)
@@ -0,0 +1,85 @@
/* eslint-disable no-console */
/* eslint-disable no-await-in-loop */
'use strict';

const chunk = require ('lodash.chunk');

// rows of `log` for one app that are older than `duration` seconds and,
// when `reduction` is given, have not yet been thinned at that level
function get_targets (knex, app_id, duration, reduction = null) {
	return knex.from ('log')
		.where ({ app_id })
		.andWhere ('timestamp', '<', Number (new Date) - (duration * 1000))
		.andWhere ((builder) => {
			if (reduction === null) {
				builder.whereNotNull ('id');
			}
			else {
				builder.where ('reduction', '<', reduction)
					.orWhere ('reduction', null);
			}
		});
}

// delete the given ids in chunks of 100, keeping the number of bound
// parameters per statement small
async function batch_delete (knex, ids) {
	for (const c of chunk (ids, 100)) {
		await knex ('log')
			.whereIn ('id', c)
			.del ();
	}

	return ids.length;
}

// same chunking for updates
async function batch_update (knex, ids, data) {
	for (const c of chunk (ids, 100)) {
		await knex ('log')
			.whereIn ('id', c)
			.update (data);
	}

	return ids.length;
}

// the reduction job: for every app, delete old rows, thin out middle-aged
// rows and drop duplicates, according to the app's reduction schedule
module.exports = async (knex) => {
	const apps = await knex ('app')
		.select ('id', 'name', 'reduction');

	for (const app of apps) {
		// schedule layout (see lib/defaults.js): first entry = duplicate
		// window, last entry = delete-all age, the rest = halving passes
		const reduction = JSON.parse (app.reduction);
		const duplicates = reduction.shift ();
		const end = reduction.pop ();

		// delete anything older than now - end
		const deleted_old = await get_targets (knex, app.id, end)
			.del ();

		console.log (`deleted ${deleted_old} old datasets`);

		for (const r of reduction) {
			const targets = (await get_targets (knex, app.id, r, r)
				.orderBy ('timestamp')
				.select ('id'))
				.map ((v) => v.id);

			// keep every other row; survivors are stamped with `reduction: r`
			// below, so the `reduction < r` filter skips them on the next run
			const even = targets.filter ((v, i) => (i % 2 === 0));
			const odd = targets.filter ((v, i) => (i % 2 !== 0));

			const deleted_reduction = await batch_delete (knex, even);

			console.log (`reduction ${r} deleted ${deleted_reduction}`);

			await batch_update (knex, odd, { reduction: r });
		}

		// within the duplicate window, keep only the lowest id of every
		// (message, data) group and delete the rest
		const deleted_duplicates = await get_targets (knex, app.id, duplicates)
			.andWhere ((builder) => {
				builder.whereNotIn ('id', (inBuilder) => {
					get_targets (inBuilder, app.id, duplicates)
						.groupBy ('message', 'data')
						.min ({ id: 'id' });
				});
			})
			.del ();

		console.log (`deleted ${deleted_duplicates} duplicates`);
	}
};
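Two details of the job worth spelling out. First, units: `timestamp` is compared against `Number (new Date)`, which is milliseconds, while the durations are configured in seconds, hence the `* 1000` in get_targets. Second, each halving pass keeps every other row (ordered by timestamp) and stamps the survivors, so a rerun does not halve them again. A standalone sketch of both, plain Node with no database; the sample ids and the one-week duration are invented:

    // cutoff arithmetic for a one-week pass (604800 s)
    const r = 604800;
    const cutoff = Number (new Date) - (r * 1000);
    console.log (new Date (cutoff).toISOString ()); // exactly one week ago

    // the even/odd split over ids ordered by timestamp
    const targets = [1, 2, 3, 4, 5, 6, 7];
    const even = targets.filter ((v, i) => (i % 2 === 0)); // [1, 3, 5, 7] -> deleted
    const odd = targets.filter ((v, i) => (i % 2 !== 0));  // [2, 4, 6]   -> kept, reduction = r

    // survivors now carry reduction = r, and get_targets filters on
    // `reduction < r or reduction is null`, so the next run leaves them alone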
lib/defaults.js (new file, 23 additions)
@@ -0,0 +1,23 @@
'use strict';

module.exports = {
	app: {

		/**
		 * reduce data after given time durations
		 * 1. delete duplicates (1 day)
		 * 2. divide by 2 (1 week)
		 * ...
		 * 7. delete all (6 weeks)
		 */
		reduction: JSON.stringify ([
			86400,
			604800,
			1209600,
			1814400,
			2419200,
			3024000,
			3628800
		])
	}
};
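A quick check that the schedule matches the comment above, converting each duration from seconds to days:

    // plain node: print each duration in days (86400 s = 1 day)
    const schedule = [86400, 604800, 1209600, 1814400, 2419200, 3024000, 3628800];

    for (const s of schedule)
    	console.log (`${s} s = ${s / 86400} days`); // 1, 7, 14, 21, 28, 35, 42 days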