mirror of
https://github.com/zadam/trilium.git
synced 2025-06-06 18:08:33 +02:00
added sectors for content check computation
This commit is contained in:
parent
b7cf4fe96b
commit
77311954a1
@ -5,7 +5,7 @@ const packageJson = require('../../package');
|
|||||||
const {TRILIUM_DATA_DIR} = require('./data_dir');
|
const {TRILIUM_DATA_DIR} = require('./data_dir');
|
||||||
|
|
||||||
const APP_DB_VERSION = 155;
|
const APP_DB_VERSION = 155;
|
||||||
const SYNC_VERSION = 12;
|
const SYNC_VERSION = 13;
|
||||||
const CLIPPER_PROTOCOL_VERSION = "1.0";
|
const CLIPPER_PROTOCOL_VERSION = "1.0";
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
|
@ -12,65 +12,67 @@ const NoteRevision = require('../entities/note_revision');
|
|||||||
const RecentNote = require('../entities/recent_note');
|
const RecentNote = require('../entities/recent_note');
|
||||||
const Option = require('../entities/option');
|
const Option = require('../entities/option');
|
||||||
|
|
||||||
/**
 * Computes per-sector content hashes for a table. Rows are bucketed by the
 * first character of their primary key (the "sector") and each bucket's row
 * hashes are concatenated (in primary-key order) and hashed once more.
 *
 * @param {string} tableName - table to hash; trusted identifier, interpolated into SQL
 * @param {string} primaryKeyName - primary key column; trusted identifier
 * @param {string} [whereBranch] - optional extra WHERE condition (trusted SQL fragment)
 * @returns {Promise<Object.<string, string>>} map of sector character -> aggregate hash
 */
async function getSectorHashes(tableName, primaryKeyName, whereBranch) {
    // The ordered subselect is necessary to have a defined ordering in
    // GROUP_CONCAT - without it SQLite makes no guarantee about concatenation
    // order, so the resulting hashes would not be comparable across databases.
    const query = `SELECT SUBSTR(${primaryKeyName}, 1, 1) AS sector, GROUP_CONCAT(hash) FROM `
        + `(SELECT ${primaryKeyName}, hash FROM ${tableName} `
        + (whereBranch ? `WHERE ${whereBranch} ` : '')
        + `ORDER BY ${primaryKeyName}) GROUP BY sector`;

    // sql.getMap maps the first selected column (sector) to the second (concatenated hashes)
    const map = await sql.getMap(query);

    // collapse each sector's concatenated row hashes into a single hash
    for (const key in map) {
        map[key] = utils.hash(map[key]);
    }

    return map;
}
|
||||||
|
|
||||||
/**
 * Computes sector hashes for every synced entity table.
 *
 * @returns {Promise<Object.<string, Object.<string, string>>>}
 *          entity name -> (sector character -> hash)
 */
async function getEntityHashes() {
    const startTime = new Date();

    // [result key, table name, primary key column, optional WHERE fragment]
    const entitySpecs = [
        ['notes', Note.entityName, Note.primaryKeyName],
        ['note_contents', "note_contents", "noteId"],
        ['branches', Branch.entityName, Branch.primaryKeyName],
        ['note_revisions', NoteRevision.entityName, NoteRevision.primaryKeyName],
        ['note_revision_contents', "note_revision_contents", "noteRevisionId"],
        ['recent_notes', RecentNote.entityName, RecentNote.primaryKeyName],
        ['options', Option.entityName, Option.primaryKeyName, "isSynced = 1"],
        ['attributes', Attribute.entityName, Attribute.primaryKeyName],
        ['api_tokens', ApiToken.entityName, ApiToken.primaryKeyName],
    ];

    const hashes = {};

    // sequential on purpose - one DB query at a time, same as the literal-object form
    for (const [key, tableName, primaryKeyName, whereBranch] of entitySpecs) {
        hashes[key] = await getSectorHashes(tableName, primaryKeyName, whereBranch);
    }

    const elapsedTimeMs = Date.now() - startTime.getTime();

    log.info(`Content hash computation took ${elapsedTimeMs}ms`);

    return hashes;
}
|
||||||
|
|
||||||
/**
 * Compares local per-sector entity hashes against a remote's and logs every
 * mismatching (entity, sector) pair.
 *
 * @param {Object.<string, Object.<string, string>>} otherHashes - remote's
 *        entity name -> (sector -> hash) map, as produced by getEntityHashes()
 * @returns {Promise<Array<{entityName: string, sector: string}>>} failed checks
 */
async function checkContentHashes(otherHashes) {
    const entityHashes = await getEntityHashes();
    const failedChecks = [];

    for (const entityName in entityHashes) {
        const thisSectorHashes = entityHashes[entityName];
        // guard against a remote that doesn't report this entity type at all
        const otherSectorHashes = otherHashes[entityName] || {};

        // BUG FIX: union the SECTOR keys of this entity, not the entity names
        // (previously Object.keys(entityHashes)/Object.keys(otherHashes) were used,
        // so actual sector keys like "a".."z" were never compared)
        const sectors = new Set(Object.keys(thisSectorHashes).concat(Object.keys(otherSectorHashes)));

        for (const sector of sectors) {
            if (thisSectorHashes[sector] !== otherSectorHashes[sector]) {
                log.info(`Content hash check for ${entityName} sector ${sector} FAILED. Local is ${thisSectorHashes[sector]}, remote is ${otherSectorHashes[sector]}`);

                failedChecks.push({ entityName, sector });
            }
        }
    }

    if (failedChecks.length === 0) {
        log.info("Content hash checks PASSED");
    }

    // backward compatible: previous version returned undefined and callers
    // ignoring the return value are unaffected
    return failedChecks;
}
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
getHashes,
|
getHashes: getEntityHashes,
|
||||||
checkContentHashes
|
checkContentHashes
|
||||||
};
|
};
|
Loading…
x
Reference in New Issue
Block a user