mirror of https://github.com/zadam/trilium.git (synced 2025-03-01 14:22:32 +01:00)
split hash checks per table; recent notes mismatches are not reported to the frontend as an error
commit 6edaf0ed00 (parent 206a6dd6e7)
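For orientation, here is a minimal JavaScript sketch of what the new per-table check amounts to. It is illustrative only (the function name compareHashes and the return shape are not part of the diff); the real logic lives in checkContentHash() below: local and remote hashes are compared key by key, and a mismatch in recent_notes is logged but not escalated to the frontend.

// Illustrative sketch only -- the actual logic is in checkContentHash() further down.
// `localHashes` and `remoteHashes` are objects keyed by table name
// (notes, notes_tree, notes_history, recent_notes, options).
function compareHashes(localHashes, remoteHashes) {
    const failedKeys = [];

    for (const key in localHashes) {
        if (localHashes[key] !== remoteHashes[key]) {
            failedKeys.push(key);
        }
    }

    // recent_notes changes often and can legitimately race with sync,
    // so a mismatch there is not treated as a frontend-visible error.
    const reportToFrontend = failedKeys.some(key => key !== 'recent_notes');

    return { failedKeys, reportToFrontend };
}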
@@ -11,7 +11,7 @@ const content_hash = require('../../services/content_hash');
 
 router.get('/check', auth.checkApiAuth, async (req, res, next) => {
     res.send({
-        'content_hash': await content_hash.getContentHash(),
+        'hashes': await content_hash.getHashes(),
         'max_sync_id': await sql.getSingleValue('SELECT MAX(id) FROM sync')
     });
 });
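For reference, the body returned by the updated /check route has roughly this shape; the hash strings and the max_sync_id value below are placeholders, not real data.

// Approximate shape of the GET /api/sync/check response after this change
// (placeholder values).
const exampleCheckResponse = {
    hashes: {
        notes: "…",
        notes_tree: "…",
        notes_history: "…",
        recent_notes: "…",
        options: "…"
    },
    max_sync_id: 1234
};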
@@ -2,7 +2,9 @@ const sql = require('./sql');
 const utils = require('./utils');
+const options = require('./options');
 
-function updateHash(hash, rows) {
+function getHash(rows) {
+    let hash = '';
+
     for (const row of rows) {
         hash = utils.hash(hash + JSON.stringify(row));
     }
@@ -10,29 +12,58 @@ function updateHash(hash, rows) {
     return hash;
 }
 
-async function getContentHash() {
-    let hash = '';
+async function getHashes() {
+    const optionsQuestionMarks = Array(options.SYNCED_OPTIONS.length).fill('?').join(',');
 
-    hash = updateHash(hash, await sql.getResults("SELECT note_id, note_title, note_text, date_modified, is_protected, " +
-        "is_deleted FROM notes ORDER BY note_id"));
+    return {
+        notes: getHash(await sql.getResults(`SELECT
+                                                note_id,
+                                                note_title,
+                                                note_text,
+                                                date_modified,
+                                                is_protected,
+                                                is_deleted
+                                             FROM notes
+                                             ORDER BY note_id`)),
 
-    hash = updateHash(hash, await sql.getResults("SELECT note_tree_id, note_id, note_pid, note_pos, date_modified, " +
-        "is_deleted, prefix FROM notes_tree ORDER BY note_tree_id"));
+        notes_tree: getHash(await sql.getResults(`SELECT
+                                                     note_tree_id,
+                                                     note_id,
+                                                     note_pid,
+                                                     note_pos,
+                                                     date_modified,
+                                                     is_deleted,
+                                                     prefix
+                                                  FROM notes_tree
+                                                  ORDER BY note_tree_id`)),
 
-    hash = updateHash(hash, await sql.getResults("SELECT note_history_id, note_id, note_title, note_text, " +
-        "date_modified_from, date_modified_to FROM notes_history ORDER BY note_history_id"));
+        notes_history: getHash(await sql.getResults(`SELECT
+                                                        note_history_id,
+                                                        note_id,
+                                                        note_title,
+                                                        note_text,
+                                                        date_modified_from,
+                                                        date_modified_to
+                                                     FROM notes_history
+                                                     ORDER BY note_history_id`)),
 
-    hash = updateHash(hash, await sql.getResults("SELECT note_tree_id, note_path, date_accessed, is_deleted FROM recent_notes " +
-        "ORDER BY note_path"));
+        recent_notes: getHash(await sql.getResults(`SELECT
+                                                       note_tree_id,
+                                                       note_path,
+                                                       date_accessed,
+                                                       is_deleted
+                                                    FROM recent_notes
+                                                    ORDER BY note_path`)),
 
-    const questionMarks = Array(options.SYNCED_OPTIONS.length).fill('?').join(',');
-
-    hash = updateHash(hash, await sql.getResults("SELECT opt_name, opt_value FROM options " +
-        "WHERE opt_name IN (" + questionMarks + ") ORDER BY opt_name", options.SYNCED_OPTIONS));
-
-    return hash;
+        options: getHash(await sql.getResults(`SELECT
+                                                  opt_name,
+                                                  opt_value
+                                               FROM options
+                                               WHERE opt_name IN (${optionsQuestionMarks})
+                                               ORDER BY opt_name`, options.SYNCED_OPTIONS))
+    };
 }
 
 module.exports = {
-    getContentHash
+    getHashes
 };
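Each per-table value above comes from getHash(), which folds every row's JSON into a running hash, so any change in row content or ordering changes the final value. A standalone sketch of that behaviour follows; utils.hash is assumed to be a plain string digest, and the hashString stand-in below uses Node's crypto module rather than the project's own helper.

const crypto = require('crypto');

// Stand-in for utils.hash -- assumed to be a simple string digest.
function hashString(str) {
    return crypto.createHash('sha1').update(str).digest('base64');
}

// Mirrors getHash(): chain each row's JSON into a running hash.
function getHashSketch(rows) {
    let hash = '';

    for (const row of rows) {
        hash = hashString(hash + JSON.stringify(row));
    }

    return hash;
}

// Example: row sets differing in a single field produce different hashes.
console.log(getHashSketch([{note_id: 'a1', note_title: 'first'}]));
console.log(getHashSketch([{note_id: 'a1', note_title: 'second'}]));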
@@ -127,7 +127,7 @@ async function pullSync(syncContext) {
 
         const resp = await syncRequest(syncContext, 'GET', "/api/sync/" + sync.entity_name + "/" + encodeURIComponent(sync.entity_id));
 
-        if (!resp) {
+        if (!resp || !resp.entity) {
             log.error("Empty response to pull for " + sync.entity_name + ", id=" + sync.entity_id);
         }
         else if (sync.entity_name === 'notes') {
@@ -254,21 +254,30 @@ async function checkContentHash(syncContext) {
 
     const resp = await syncRequest(syncContext, 'GET', '/api/sync/check');
 
-    // if (await getLastSyncedPull() < resp.max_sync_id) {
-    //     log.info("There are some outstanding pulls, skipping content check.");
-    //
-    //     return;
-    // }
+    if (await getLastSyncedPull() < resp.max_sync_id) {
+        log.info("There are some outstanding pulls, skipping content check.");
 
-    const localContentHash = await content_hash.getContentHash();
-
-    if (resp.content_hash === localContentHash) {
-        log.info("Content hash check PASSED with value: " + localContentHash);
+        return;
     }
-    else {
-        await messaging.sendMessage({type: 'sync-hash-check-failed'});
 
-        await event_log.addEvent("Content hash check FAILED. Local is " + localContentHash + ", remote is " + resp.content_hash);
+    const hashes = await content_hash.getHashes();
+    let allChecksPassed = true;
+
+    for (const key in hashes) {
+        if (hashes[key] !== resp.hashes[key]) {
+            allChecksPassed = false;
+
+            await event_log.addEvent(`Content hash check for ${key} FAILED. Local is ${hashes[key]}, remote is ${resp.hashes[key]}`);
+
+            if (key !== 'recent_notes') {
+                // let's not get alarmed about recent notes which get updated often and can cause failures in race conditions
+                await messaging.sendMessage({type: 'sync-hash-check-failed'});
+            }
+        }
+    }
+
+    if (allChecksPassed) {
+        log.info("Content hash checks PASSED");
     }
 }
 
@@ -38,7 +38,7 @@ async function addEntitySync(entityName, entityId, sourceId) {
     if (!sync_setup.isSyncSetup) {
         // this is because the "server" instances shouldn't have outstanding pushes
        // useful when you fork the DB for new "client" instance, it won't try to sync the whole DB
-        await sql.execute("UPDATE options SET opt_value = (SELECT MAX(id) FROM sync) WHERE opt_name = 'last_synced_push'");
+        await sql.execute("UPDATE options SET opt_value = (SELECT MAX(id) FROM sync) WHERE opt_name IN('last_synced_push', 'last_synced_pull')");
     }
 }
 