don't log known slow queries

zadam 2023-10-20 09:36:57 +02:00
parent 5c10fc26be
commit 4773f9392b
4 changed files with 65 additions and 33 deletions

View File

@@ -15,18 +15,22 @@ const entityConstructor = require("../becca/entity_constructor");
 const beccaLoaded = new Promise((res, rej) => {
     sqlInit.dbReady.then(() => {
-        load();
-
-        cls.init(() => require('../services/options_init').initStartupOptions());
-
-        res();
+        cls.init(() => {
+            load();
+
+            require('../services/options_init').initStartupOptions();
+
+            res();
+        });
     });
 });
 
 function load() {
     const start = Date.now();
     becca.reset();
 
-    // using a raw query and passing arrays to avoid allocating new objects,
-    // this is worth it for the becca load since it happens every run and blocks the app until finished
+    // we know this is slow and the total becca load time is logged
+    sql.disableSlowQueryLogging(() => {
+        // using a raw query and passing arrays to avoid allocating new objects,
+        // this is worth it for the becca load since it happens every run and blocks the app until finished
@@ -53,6 +57,7 @@ function load() {
     for (const row of sql.getRows(`SELECT etapiTokenId, name, tokenHash, utcDateCreated, utcDateModified FROM etapi_tokens WHERE isDeleted = 0`)) {
         new BEtapiToken(row);
     }
+    });
 
     for (const noteId in becca.notes) {
         becca.notes[noteId].sortParents();
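
Aside (not part of the commit): moving load() inside cls.init() appears to be what gives the loader an active continuation-local storage context, which sql.disableSlowQueryLogging() needs in order to set its flag (see cls.js and sql.js below). A minimal illustrative sketch of the resulting shape, with the bulk queries and the exact log message elided:

// illustrative sketch only -- the real load() runs many bulk SELECTs here
cls.init(() => {
    const start = Date.now();

    sql.disableSlowQueryLogging(() => {
        // bulk SELECTs known to exceed the 20ms slow-query threshold
    });

    log.info(`Becca load took ${Date.now() - start}ms`); // total load time is still reported
});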

View File

@@ -48,6 +48,14 @@ function isEntityEventsDisabled() {
     return !!namespace.get('disableEntityEvents');
 }
 
+function disableSlowQueryLogging(disable) {
+    namespace.set('disableSlowQueryLogging', disable);
+}
+
+function isSlowQueryLoggingDisabled() {
+    return !!namespace.get('disableSlowQueryLogging');
+}
+
 function getAndClearEntityChangeIds() {
     const entityChangeIds = namespace.get('entityChangeIds') || [];
@@ -93,4 +101,6 @@ module.exports = {
     getAndClearEntityChangeIds,
     putEntityChange,
     ignoreEntityChangeIds,
+    disableSlowQueryLogging,
+    isSlowQueryLoggingDisabled
 };
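
Aside (not part of the commit): because the flag is stored in the cls namespace (continuation-local storage), it is scoped to the async context created by cls.init(), so suppressing slow-query logging in one context does not leak into concurrent requests. A small illustrative sketch under that assumption:

// illustrative sketch only
cls.init(() => {
    cls.disableSlowQueryLogging(true);
    cls.isSlowQueryLoggingDisabled(); // true within this context
});

cls.init(() => {
    cls.isSlowQueryLoggingDisabled(); // false, a fresh context
});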

View File

@@ -11,14 +11,17 @@ function getEntityHashes() {
     const startTime = new Date();
 
-    const hashRows = sql.getRawRows(`
+    // we know this is slow and the total content hash calculation time is logged
+    const hashRows = sql.disableSlowQueryLogging(
+        () => sql.getRawRows(`
             SELECT entityName,
                    entityId,
                    hash,
                    isErased
             FROM entity_changes
             WHERE isSynced = 1
-              AND entityName != 'note_reordering'`);
+              AND entityName != 'note_reordering'`)
+    );
 
     // sorting is faster in memory
     // sorting by entityId is enough, hashes will be segmented by entityName later on anyway

View File

@@ -225,7 +225,7 @@ function wrap(query, func) {
     const milliseconds = Date.now() - startTimestamp;
 
-    if (milliseconds >= 20) {
+    if (milliseconds >= 20 && !cls.isSlowQueryLoggingDisabled()) {
         if (query.includes("WITH RECURSIVE")) {
             log.info(`Slow recursive query took ${milliseconds}ms.`);
         }
@@ -295,6 +295,19 @@ async function copyDatabase(targetFilePath) {
     await dbConnection.backup(targetFilePath);
 }
 
+function disableSlowQueryLogging(cb) {
+    const orig = cls.isSlowQueryLoggingDisabled();
+
+    try {
+        cls.disableSlowQueryLogging(true);
+
+        return cb();
+    }
+    finally {
+        cls.disableSlowQueryLogging(orig);
+    }
+}
+
 module.exports = {
     dbConnection,
     insert,
@@ -367,5 +380,6 @@ module.exports = {
     transactional,
     upsert,
     fillParamList,
-    copyDatabase
+    copyDatabase,
+    disableSlowQueryLogging
 };
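
Aside (not part of the commit): sql.disableSlowQueryLogging() returns whatever the callback returns and restores the previous flag value in the finally block, so it can wrap a single query expression inline (as content_hash.js does above) and nested calls stay safe. An illustrative usage sketch:

// illustrative sketch only -- the wrapper passes the callback's result through
const noteCount = sql.disableSlowQueryLogging(
    () => sql.getValue(`SELECT COUNT(*) FROM notes`) // not reported even if it takes >= 20ms
);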