reduction job, chart duplicates
All checks were successful
continuous-integration/drone/push Build is passing

This commit is contained in:
Timo Hocker 2020-08-25 15:24:20 +02:00
parent 19904bd0ec
commit f57444a3d1
11 changed files with 185 additions and 29 deletions

View File

@ -11,6 +11,7 @@
const knex = require ('knex'); const knex = require ('knex');
const fs = require ('fs'); const fs = require ('fs');
const job = require ('./job');
let db = null; let db = null;
@ -18,8 +19,10 @@ async function init (use_fake_seed) {
if (!fs.existsSync ('data')) if (!fs.existsSync ('data'))
fs.mkdirSync ('data'); fs.mkdirSync ('data');
if (use_fake_seed && fs.existsSync ('data/db.sqlite')) if (use_fake_seed && fs.existsSync ('data/db.sqlite')) {
fs.unlinkSync ('data/db.sqlite'); fs.unlinkSync ('data/db.sqlite');
fs.copyFileSync ('data/test.sqlite', 'data/db.sqlite');
}
db = knex ({ db = knex ({
client: 'sqlite', client: 'sqlite',
@ -30,10 +33,11 @@ async function init (use_fake_seed) {
}); });
await db.migrate.latest (); await db.migrate.latest ();
if (use_fake_seed)
await db.seed.run ({ specific: 'fake.js' }); await db.seed.run ({ specific: 'prod.js' });
else
await db.seed.run ({ specific: 'prod.js' }); // await db.seed.run ({ specific: 'fake.js' });
await job (db);
} }
function get_db () { function get_db () {

85
lib/db/job.js Normal file
View File

@ -0,0 +1,85 @@
/* eslint-disable no-console */
/* eslint-disable no-await-in-loop */
'use strict';
const chunk = require ('lodash.chunk');
/**
 * Build a query selecting log rows of an app that are older than
 * `duration` seconds.
 *
 * @param {Object} knex - knex instance or sub-query builder
 * @param {number} app_id - id of the app whose logs are targeted
 * @param {number} duration - minimum age of the rows, in seconds
 * @param {?number} reduction - when given, restrict to rows whose
 *   reduction stage is below this value or not set at all
 * @returns {Object} the knex query builder (not yet executed)
 */
function get_targets (knex, app_id, duration, reduction = null) {
  return knex.from ('log')
    .where ({ app_id })
    // Date.now () instead of Number (new Date) — same value, clearer intent
    .andWhere ('timestamp', '<', Date.now () - (duration * 1000))
    .andWhere ((builder) => {
      if (reduction === null) {
        // no reduction filter: match every row (id is never null) so the
        // generated query keeps the same shape in both branches
        builder.whereNotNull ('id');
      }
      else {
        // orWhereNull is explicit about the IS NULL comparison instead of
        // relying on knex translating `orWhere ('reduction', null)`
        builder.where ('reduction', '<', reduction)
          .orWhereNull ('reduction');
      }
    });
}
/**
 * Delete the given log rows in batches of 100 ids per statement, so the
 * generated `WHERE IN` clause stays small.
 *
 * @param {Object} knex - knex instance
 * @param {Array<number>} ids - primary keys of the rows to delete
 * @returns {Promise<number>} how many ids were submitted for deletion
 */
async function batch_delete (knex, ids) {
  const batch_size = 100;
  for (let start = 0; start < ids.length; start += batch_size) {
    const slice = ids.slice (start, start + batch_size);
    await knex ('log')
      .whereIn ('id', slice)
      .del ();
  }
  return ids.length;
}
/**
 * Apply the same update to the given log rows, 100 ids per statement,
 * so the generated `WHERE IN` clause stays small.
 *
 * @param {Object} knex - knex instance
 * @param {Array<number>} ids - primary keys of the rows to update
 * @param {Object} data - column/value pairs to set on every row
 * @returns {Promise<number>} how many ids were submitted for update
 */
async function batch_update (knex, ids, data) {
  const batch_size = 100;
  for (let start = 0; start < ids.length; start += batch_size) {
    const slice = ids.slice (start, start + batch_size);
    await knex ('log')
      .whereIn ('id', slice)
      .update (data);
  }
  return ids.length;
}
/**
 * Data-reduction job: progressively thins out old log rows per app.
 *
 * Each app's `reduction` column holds a JSON array of durations in
 * seconds: the first entry is the age after which duplicate rows are
 * collapsed, the last is the age after which all rows are deleted, and
 * every entry in between is an age at which data density is halved.
 *
 * @param {Object} knex - connected knex instance
 */
module.exports = async (knex) => {
  const apps = await knex ('app')
    .select ('id', 'name', 'reduction');
  for (const app of apps) {
    // NOTE(review): assumes app.reduction is always a valid JSON array
    // with at least two entries — confirm the migration guarantees this
    const reduction = JSON.parse (app.reduction);
    // first entry: duplicate-removal age; last entry: delete-all age
    const duplicates = reduction.shift ();
    const end = reduction.pop ();
    // delete anything older than now - end
    const deleted_old = await get_targets (knex, app.id, end)
      .del ();
    console.log (`deleted ${deleted_old} old datasets`);
    // for each remaining stage, halve the density: delete every
    // even-indexed row (in timestamp order) and tag the surviving
    // odd-indexed rows with the stage so get_targets skips them on the
    // next run of the same stage
    for (const r of reduction) {
      const targets = (await get_targets (knex, app.id, r, r)
        .orderBy ('timestamp')
        .select ('id'))
        .map ((v) => v.id);
      const even = targets.filter ((v, i) => (i % 2 === 0));
      const odd = targets.filter ((v, i) => (i % 2 !== 0));
      const deleted_reduction = await batch_delete (knex, even);
      console.log (`reduction ${r} deleted ${deleted_reduction}`);
      await batch_update (knex, odd, { reduction: r });
    }
    // drop duplicate rows older than the duplicates threshold, keeping
    // the lowest id per (message, data) group; the whereNotIn callback
    // relies on get_targets mutating the passed-in sub-builder in place,
    // which is why its return value is deliberately ignored
    const deleted_duplicates = await get_targets (knex, app.id, duplicates)
      .andWhere ((builder) => {
        builder.whereNotIn ('id', (inBuilder) => {
          get_targets (inBuilder, app.id, duplicates)
            .groupBy ('message', 'data')
            .min ({ id: 'id' });
        });
      })
      .del ();
    console.log (`deleted ${deleted_duplicates} duplicates`);
  }
};

23
lib/defaults.js Normal file
View File

@ -0,0 +1,23 @@
'use strict';
module.exports = {
app: {
/**
* reduce data after given time durations
* 1. delete duplicates (1 day)
* 2. divide by 2 (1 week)
* ...
* 7. delete all (6 weeks)
*/
reduction: JSON.stringify ([
86400,
604800,
1209600,
1814400,
2419200,
3024000,
3628800
])
}
};

View File

@ -0,0 +1,22 @@
'use strict';
const defaults = require ('../lib/defaults');
/**
 * Add the data-reduction columns and seed existing apps with the
 * default reduction schedule.
 *
 * @param {Object} knex - knex instance provided by the migration runner
 */
async function up (knex) {
  // app.reduction: JSON array of reduction durations (seconds)
  await knex.schema.table ('app', (table) => {
    table.string ('reduction');
  });
  // log.reduction: highest reduction stage already applied to the row
  await knex.schema.table ('log', (table) => {
    table.integer ('reduction');
  });
  // runs after the app column exists, so every current app gets defaults
  await knex ('app')
    .update ({ reduction: defaults.app.reduction });
}

/**
 * Rollback intentionally left as a no-op.
 */
function down () {
  // noop
}

module.exports = { up, down };

View File

@ -20,6 +20,7 @@
"express-http-proxy": "^1.6.2", "express-http-proxy": "^1.6.2",
"faker": "^4.1.0", "faker": "^4.1.0",
"knex": "^0.21.2", "knex": "^0.21.2",
"lodash.chunk": "^4.2.0",
"sqlite3": "^5.0.0", "sqlite3": "^5.0.0",
"vue": "^2.6.11", "vue": "^2.6.11",
"vue-chartjs": "^3.5.0", "vue-chartjs": "^3.5.0",
@ -45,5 +46,8 @@
"author": { "author": {
"name": "Timo Hocker", "name": "Timo Hocker",
"email": "timo@scode.ovh" "email": "timo@scode.ovh"
},
"engines": {
"node": ">=10.0.0"
} }
} }

View File

@ -9,16 +9,6 @@
const faker = require ('faker'); const faker = require ('faker');
const apps = [];
async function create_app (knex) {
const [ id ] = await knex ('app')
.insert (
{ name: faker.random.word () }
);
apps.push (id);
}
let last_t = 0; let last_t = 0;
let last_h = 0; let last_h = 0;
@ -32,7 +22,7 @@ function create_log (timestamp) {
humidity: last_h humidity: last_h
}; };
return { return {
app_id: faker.random.arrayElement (apps), app_id: 1,
message: faker.random.words (), message: faker.random.words (),
data: JSON.stringify (data), data: JSON.stringify (data),
timestamp timestamp
@ -40,15 +30,14 @@ function create_log (timestamp) {
} }
async function seed (knex) { async function seed (knex) {
await knex ('log')
.del ();
// eslint-disable-next-line no-console // eslint-disable-next-line no-console
console.log ('creating seeds'); console.log ('creating seeds');
for (let i = 0; i < 5; i++)
// eslint-disable-next-line no-await-in-loop
await create_app (knex);
const log = (Array (1000)) const log = (Array (10000))
.fill (() => null) .fill (() => null)
.map (() => faker.date.recent (30)) .map (() => faker.date.recent (60))
.sort () .sort ()
.map ((t) => create_log (t)); .map ((t) => create_log (t));

View File

@ -17,6 +17,10 @@ export default {
yaxis: { yaxis: {
type: Array, type: Array,
required: true required: true
},
remove_duplicates: {
type: Boolean,
default: false
} }
}, },
computed: { computed: {
@ -28,10 +32,23 @@ export default {
data: [], data: [],
yAxisID: index, yAxisID: index,
borderColor: y.color, borderColor: y.color,
backgroundColor: y.fill backgroundColor: y.fill,
spanGaps: true
}; };
for (const data of this.data) let last = null;
res.data.push (resolve_data (data, y.field)); for (let i = 0; i < this.data.length; i++) {
const data = this.data[i];
const val = resolve_data (data, y.field);
if (
!this.remove_duplicates
|| last !== val
|| this.data.length - 1 === i
)
res.data.push (val);
else
res.data.push (null);
last = val;
}
return res; return res;
}); });
return { datasets, labels }; return { datasets, labels };

View File

@ -4,6 +4,7 @@
:data="[...data].reverse()" :data="[...data].reverse()"
:xaxis="config.x" :xaxis="config.x"
:yaxis="config.y" :yaxis="config.y"
:remove_duplicates="config.remove_duplicates"
/> />
<TableView <TableView
v-else v-else

View File

@ -20,10 +20,11 @@ export default {
], ],
displays: [ displays: [
{ {
source: 'default', source: 'default',
type: 'chart', type: 'chart',
x: 'timestamp', remove_duplicates: true,
y: [ x: 'timestamp',
y: [
{ {
label: 'temperature', label: 'temperature',
field: 'data/temperature', field: 'data/temperature',

View File

@ -73,6 +73,11 @@ export default {
{ type: 'number', name: 'max_value' } { type: 'number', name: 'max_value' }
] ]
} }
},
{
name: 'remove_duplicates',
type: 'boolean',
if: { prop: 'type', op: '=', val: 'chart' }
} }
] ]
} }

View File

@ -5612,6 +5612,11 @@ locate-path@^5.0.0:
dependencies: dependencies:
p-locate "^4.1.0" p-locate "^4.1.0"
lodash.chunk@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/lodash.chunk/-/lodash.chunk-4.2.0.tgz#66e5ce1f76ed27b4303d8c6512e8d1216e8106bc"
integrity sha1-ZuXOH3btJ7QwPYxlEujRIW6BBrw=
lodash.defaultsdeep@^4.6.1: lodash.defaultsdeep@^4.6.1:
version "4.6.1" version "4.6.1"
resolved "https://registry.yarnpkg.com/lodash.defaultsdeep/-/lodash.defaultsdeep-4.6.1.tgz#512e9bd721d272d94e3d3a63653fa17516741ca6" resolved "https://registry.yarnpkg.com/lodash.defaultsdeep/-/lodash.defaultsdeep-4.6.1.tgz#512e9bd721d272d94e3d3a63653fa17516741ca6"