Mirror of https://github.com/zadam/trilium.git
split hash checks per table, with recent_notes mismatches not reported to the frontend as an error

commit 6edaf0ed00
parent 206a6dd6e7
@@ -11,7 +11,7 @@ const content_hash = require('../../services/content_hash');
 
 router.get('/check', auth.checkApiAuth, async (req, res, next) => {
     res.send({
-        'content_hash': await content_hash.getContentHash(),
+        'hashes': await content_hash.getHashes(),
        'max_sync_id': await sql.getSingleValue('SELECT MAX(id) FROM sync')
     });
 });
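With the route above, a client calling GET /api/sync/check now receives one hash per synced table instead of a single combined hash. The response shape is roughly the following (the values are illustrative placeholders, not real output):

// Illustrative response of GET /api/sync/check after this change:
{
    "hashes": {
        "notes": "<hash over all rows of notes>",
        "notes_tree": "<hash over all rows of notes_tree>",
        "notes_history": "<hash over all rows of notes_history>",
        "recent_notes": "<hash over all rows of recent_notes>",
        "options": "<hash over the synced rows of options>"
    },
    "max_sync_id": 12345
}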
@@ -2,7 +2,9 @@ const sql = require('./sql');
 const utils = require('./utils');
 const options = require('./options');
 
-function updateHash(hash, rows) {
+function getHash(rows) {
+    let hash = '';
+
     for (const row of rows) {
         hash = utils.hash(hash + JSON.stringify(row));
     }
@@ -10,29 +12,58 @@ function updateHash(hash, rows) {
     return hash;
 }
 
-async function getContentHash() {
-    let hash = '';
-
-    hash = updateHash(hash, await sql.getResults("SELECT note_id, note_title, note_text, date_modified, is_protected, " +
-        "is_deleted FROM notes ORDER BY note_id"));
-
-    hash = updateHash(hash, await sql.getResults("SELECT note_tree_id, note_id, note_pid, note_pos, date_modified, " +
-        "is_deleted, prefix FROM notes_tree ORDER BY note_tree_id"));
-
-    hash = updateHash(hash, await sql.getResults("SELECT note_history_id, note_id, note_title, note_text, " +
-        "date_modified_from, date_modified_to FROM notes_history ORDER BY note_history_id"));
-
-    hash = updateHash(hash, await sql.getResults("SELECT note_tree_id, note_path, date_accessed, is_deleted FROM recent_notes " +
-        "ORDER BY note_path"));
-
-    const questionMarks = Array(options.SYNCED_OPTIONS.length).fill('?').join(',');
-
-    hash = updateHash(hash, await sql.getResults("SELECT opt_name, opt_value FROM options " +
-        "WHERE opt_name IN (" + questionMarks + ") ORDER BY opt_name", options.SYNCED_OPTIONS));
-
-    return hash;
-}
+async function getHashes() {
+    const optionsQuestionMarks = Array(options.SYNCED_OPTIONS.length).fill('?').join(',');
+
+    return {
+        notes: getHash(await sql.getResults(`SELECT
+                                               note_id,
+                                               note_title,
+                                               note_text,
+                                               date_modified,
+                                               is_protected,
+                                               is_deleted
+                                             FROM notes
+                                             ORDER BY note_id`)),
+        notes_tree: getHash(await sql.getResults(`SELECT
+                                                    note_tree_id,
+                                                    note_id,
+                                                    note_pid,
+                                                    note_pos,
+                                                    date_modified,
+                                                    is_deleted,
+                                                    prefix
+                                                  FROM notes_tree
+                                                  ORDER BY note_tree_id`)),
+        notes_history: getHash(await sql.getResults(`SELECT
+                                                       note_history_id,
+                                                       note_id,
+                                                       note_title,
+                                                       note_text,
+                                                       date_modified_from,
+                                                       date_modified_to
+                                                     FROM notes_history
+                                                     ORDER BY note_history_id`)),
+        recent_notes: getHash(await sql.getResults(`SELECT
+                                                      note_tree_id,
+                                                      note_path,
+                                                      date_accessed,
+                                                      is_deleted
+                                                    FROM recent_notes
+                                                    ORDER BY note_path`)),
+        options: getHash(await sql.getResults(`SELECT
+                                                 opt_name,
+                                                 opt_value
+                                               FROM options
+                                               WHERE opt_name IN (${optionsQuestionMarks})
+                                               ORDER BY opt_name`, options.SYNCED_OPTIONS))
+    };
+}
 
 module.exports = {
-    getContentHash
+    getHashes
 };
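For reference, getHash folds every row into a running digest, so any added, removed, or changed row — or a change in row order — yields a different table hash, which is why each query carries a deterministic ORDER BY. A standalone sketch of the same technique, with a stand-in for utils.hash (assumed here to be a SHA-1/base64 digest; the real utils.js may use a different digest or encoding):

const crypto = require('crypto');

// Stand-in for utils.hash — an assumption, not the actual implementation.
function utilsHash(text) {
    return crypto.createHash('sha1').update(text).digest('base64');
}

// Same folding technique as getHash() above: each row's JSON is chained
// into the running digest, so the result depends on every row and on row order.
function getHash(rows) {
    let hash = '';

    for (const row of rows) {
        hash = utilsHash(hash + JSON.stringify(row));
    }

    return hash;
}

// Two databases holding identical rows in identical order produce identical hashes:
console.log(getHash([{note_id: '1', note_title: 'a'}, {note_id: '2', note_title: 'b'}]));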
@@ -127,7 +127,7 @@ async function pullSync(syncContext) {
 
         const resp = await syncRequest(syncContext, 'GET', "/api/sync/" + sync.entity_name + "/" + encodeURIComponent(sync.entity_id));
 
-        if (!resp) {
+        if (!resp || !resp.entity) {
             log.error("Empty response to pull for " + sync.entity_name + ", id=" + sync.entity_id);
         }
         else if (sync.entity_name === 'notes') {
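The added !resp.entity guard treats a reply that arrives without an entity payload the same as no reply at all: it is logged as an empty response instead of falling through to the entity_name branches below.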
@@ -254,21 +254,30 @@ async function checkContentHash(syncContext) {
 
     const resp = await syncRequest(syncContext, 'GET', '/api/sync/check');
 
-    // if (await getLastSyncedPull() < resp.max_sync_id) {
-    //     log.info("There are some outstanding pulls, skipping content check.");
-    //
-    //     return;
-    // }
+    if (await getLastSyncedPull() < resp.max_sync_id) {
+        log.info("There are some outstanding pulls, skipping content check.");
 
-    const localContentHash = await content_hash.getContentHash();
+        return;
+    }
 
-    if (resp.content_hash === localContentHash) {
-        log.info("Content hash check PASSED with value: " + localContentHash);
-    }
-    else {
-        await messaging.sendMessage({type: 'sync-hash-check-failed'});
+    const hashes = await content_hash.getHashes();
+    let allChecksPassed = true;
 
-        await event_log.addEvent("Content hash check FAILED. Local is " + localContentHash + ", remote is " + resp.content_hash);
+    for (const key in hashes) {
+        if (hashes[key] !== resp.hashes[key]) {
+            allChecksPassed = false;
+
+            await event_log.addEvent(`Content hash check for ${key} FAILED. Local is ${hashes[key]}, remote is ${resp.hashes[key]}`);
+
+            if (key !== 'recent_notes') {
+                // let's not get alarmed about recent notes which get updated often and can cause failures in race conditions
+                await messaging.sendMessage({type: 'sync-hash-check-failed'});
+            }
+        }
+    }
+
+    if (allChecksPassed) {
+        log.info("Content hash checks PASSED");
+    }
 }
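The loop above compares local and remote hashes table by table; every mismatch goes to the event log, but only mismatches outside recent_notes are pushed to the frontend. A hypothetical standalone version of that decision (the function name and return shape are illustrative, not part of the commit):

// Split the mismatching tables by whether they should alarm the frontend.
function diffHashes(localHashes, remoteHashes) {
    const failed = Object.keys(localHashes)
        .filter(key => localHashes[key] !== remoteHashes[key]);

    return {
        failed,
        // recent_notes is updated often and can mismatch in benign race
        // conditions, so it is logged but never reported to the frontend
        frontendErrors: failed.filter(key => key !== 'recent_notes')
    };
}

// Example: only the recent_notes hash differs => nothing reaches the frontend.
console.log(diffHashes(
    {notes: 'aaa', recent_notes: 'bbb'},
    {notes: 'aaa', recent_notes: 'ccc'}
));
// => { failed: [ 'recent_notes' ], frontendErrors: [] }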
@@ -38,7 +38,7 @@ async function addEntitySync(entityName, entityId, sourceId) {
     if (!sync_setup.isSyncSetup) {
         // this is because the "server" instances shouldn't have outstanding pushes
         // useful when you fork the DB for new "client" instance, it won't try to sync the whole DB
-        await sql.execute("UPDATE options SET opt_value = (SELECT MAX(id) FROM sync) WHERE opt_name = 'last_synced_push'");
+        await sql.execute("UPDATE options SET opt_value = (SELECT MAX(id) FROM sync) WHERE opt_name IN('last_synced_push', 'last_synced_pull')");
     }
 }
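With the widened WHERE clause, a database forked for a new "client" instance fast-forwards both sync cursors, last_synced_push and last_synced_pull, to the current MAX(id) of the sync table, so the fork does not try to replay the entire existing sync log in either direction.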