mirror of https://github.com/zadam/trilium.git

added content hash check

parent d7644de666
commit 79a803ccc9

migrations/0043__clear_sync_table.sql (new file, 3 lines)
@@ -0,0 +1,3 @@
+DELETE FROM sync;
+
+UPDATE options SET opt_value = 0 WHERE opt_name IN ('last_synced_push', 'last_synced_pull');
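
For a quick sanity check of what migration 0043 leaves behind, here is a hedged sketch using the sql helpers that appear elsewhere in this commit; the require path and the standalone-script framing are assumptions for illustration only.

// Illustrative check that the migration emptied the sync table and reset the
// push/pull cursors; sql.getSingleValue and sql.getResults are the helpers
// already used in the code below.
const sql = require('./services/sql');

async function verifyMigration43() {
    const syncRows = await sql.getSingleValue("SELECT COUNT(*) FROM sync");
    const cursors = await sql.getResults(
        "SELECT opt_name, opt_value FROM options WHERE opt_name IN ('last_synced_push', 'last_synced_pull')");

    console.log(syncRows);   // expected: 0
    console.log(cursors);    // expected: both opt_value columns reset to 0
}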

@@ -7,6 +7,14 @@ const sync = require('../../services/sync');
 const syncUpdate = require('../../services/sync_update');
 const sql = require('../../services/sql');
 const options = require('../../services/options');
+const content_hash = require('../../services/content_hash');
 
+router.get('/check', auth.checkApiAuth, async (req, res, next) => {
+    res.send({
+        'content_hash': await content_hash.getContentHash(),
+        'max_sync_id': await sql.getSingleValue('SELECT MAX(id) FROM sync')
+    });
+});
+
 router.post('/now', auth.checkApiAuth, async (req, res, next) => {
     res.send(await sync.sync());
@@ -59,10 +67,10 @@ router.get('/notes_reordering/:noteTreeParentId', auth.checkApiAuth, async (req,
     });
 });
 
-router.get('/recent_notes/:noteId', auth.checkApiAuth, async (req, res, next) => {
-    const noteId = req.params.noteId;
+router.get('/recent_notes/:notePath', auth.checkApiAuth, async (req, res, next) => {
+    const notePath = req.params.notePath;
 
-    res.send(await sql.getSingleResult("SELECT * FROM recent_notes WHERE note_id = ?", [noteId]));
+    res.send(await sql.getSingleResult("SELECT * FROM recent_notes WHERE note_path = ?", [notePath]));
 });
 
 router.put('/notes', auth.checkApiAuth, async (req, res, next) => {
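
For orientation, a rough sketch of probing the new /check route from a script. The node-fetch dependency and the way authentication is supplied here are assumptions for illustration only; the application's own sync client calls this route through syncRequest instead (see further down).

// Hypothetical standalone probe of GET /api/sync/check (illustrative only).
// Assumes node-fetch is installed and that `cookie` carries a session
// accepted by auth.checkApiAuth.
const fetch = require('node-fetch');

async function probeSyncCheck(baseUrl, cookie) {
    const resp = await fetch(baseUrl + '/api/sync/check', {
        headers: { cookie }
    });

    // Response shape produced by the route added above.
    const { content_hash, max_sync_id } = await resp.json();

    return { content_hash, max_sync_id };
}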

services/content_hash.js (new file, 38 lines)
@@ -0,0 +1,38 @@
+const sql = require('./sql');
+const utils = require('./utils');
+const options = require('./options');
+
+function updateHash(hash, rows) {
+    for (const row of rows) {
+        hash = utils.hash(hash + JSON.stringify(row));
+    }
+
+    return hash;
+}
+
+async function getContentHash() {
+    let hash = '';
+
+    hash = updateHash(hash, await sql.getResults("SELECT note_id, note_title, note_text, date_modified, is_protected, " +
+        "is_deleted FROM notes ORDER BY note_id"));
+
+    hash = updateHash(hash, await sql.getResults("SELECT note_tree_id, note_id, note_pid, note_pos, date_modified, " +
+        "is_deleted FROM notes_tree ORDER BY note_tree_id"));
+
+    hash = updateHash(hash, await sql.getResults("SELECT note_history_id, note_id, note_title, note_text, " +
+        "date_modified_from, date_modified_to FROM notes_history ORDER BY note_history_id"));
+
+    hash = updateHash(hash, await sql.getResults("SELECT note_path, date_accessed, is_deleted FROM recent_notes " +
+        "ORDER BY note_path"));
+
+    const questionMarks = Array(options.SYNCED_OPTIONS.length).fill('?').join(',');
+
+    hash = updateHash(hash, await sql.getResults("SELECT opt_name, opt_value FROM options " +
+        "WHERE opt_name IN (" + questionMarks + ") ORDER BY opt_name", options.SYNCED_OPTIONS));
+
+    return hash;
+}
+
+module.exports = {
+    getContentHash
+};
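
As an aside, the chaining in updateHash() can be reproduced in isolation. The minimal sketch below uses Node's crypto directly in place of utils.hash (which this commit adds further down as SHA-1 + base64) to show that both row content and row order feed into the final value, which is why every query above carries an ORDER BY.

// Standalone illustration of the hash chaining used by content_hash.js.
const crypto = require('crypto');

function hash(text) {
    return crypto.createHash('sha1').update(text).digest('base64');
}

function updateHash(h, rows) {
    for (const row of rows) {
        h = hash(h + JSON.stringify(row));
    }

    return h;
}

const a = updateHash('', [{note_id: '1', note_title: 'x'}, {note_id: '2', note_title: 'y'}]);
const b = updateHash('', [{note_id: '2', note_title: 'y'}, {note_id: '1', note_title: 'x'}]);

console.log(a === b);   // false: same rows, different order, different hash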

@@ -4,7 +4,7 @@ const options = require('./options');
 const fs = require('fs-extra');
 const log = require('./log');
 
-const APP_DB_VERSION = 42;
+const APP_DB_VERSION = 43;
 const MIGRATIONS_DIR = "migrations";
 
 async function migrate() {

@@ -130,14 +130,14 @@ async function deleteRecentAudits(category, browserId, noteId) {
 }
 
 async function wrap(func) {
+    const thisError = new Error();
+
     const db = await dbReady;
 
     try {
         return await func(db);
     }
     catch (e) {
-        const thisError = new Error();
-
         log.error("Error executing query. Inner exception: " + e.stack + thisError.stack);
 
         throw thisError;

@@ -10,6 +10,8 @@ const config = require('./config');
 const source_id = require('./source_id');
 const notes = require('./notes');
 const syncUpdate = require('./sync_update');
+const content_hash = require('./content_hash');
+const event_log = require('./event_log');
 
 const SYNC_SERVER = config['Sync']['syncServerHost'];
 const isSyncSetup = !!SYNC_SERVER;
@@ -49,6 +51,8 @@ async function sync() {
 
         await pushSync(syncContext);
 
+        await checkContentHash(syncContext);
+
         return {
             success: true
         };
@@ -97,8 +101,12 @@ async function login() {
     return syncContext;
 }
 
+async function getLastSyncedPull() {
+    return parseInt(await options.getOption('last_synced_pull'));
+}
+
 async function pullSync(syncContext) {
-    const lastSyncedPull = parseInt(await options.getOption('last_synced_pull'));
+    const lastSyncedPull = await getLastSyncedPull();
 
     const changesUri = '/api/sync/changed?lastSyncId=' + lastSyncedPull;
 
@@ -145,8 +153,12 @@ async function pullSync(syncContext) {
     log.info("Finished pull");
 }
 
+async function getLastSyncedPush() {
+    return parseInt(await options.getOption('last_synced_push'));
+}
+
 async function pushSync(syncContext) {
-    let lastSyncedPush = parseInt(await options.getOption('last_synced_push'));
+    let lastSyncedPush = await getLastSyncedPush();
 
     while (true) {
         const sync = await sql.getSingleResultOrNull('SELECT * FROM sync WHERE id > ? LIMIT 1', [lastSyncedPush]);
@@ -223,6 +235,34 @@ async function sendEntity(syncContext, entity, entityName) {
     await syncRequest(syncContext, 'PUT', '/api/sync/' + entityName, payload);
 }
 
+async function checkContentHash(syncContext) {
+    const lastSyncedPush = await getLastSyncedPush();
+    const notPushedSyncs = await sql.getSingleValue("SELECT COUNT(*) FROM sync WHERE id > ?", [lastSyncedPush]);
+
+    if (notPushedSyncs > 0) {
+        log.info("There's " + notPushedSyncs + " outstanding pushes, skipping content check.");
+
+        return;
+    }
+
+    const resp = await syncRequest(syncContext, 'GET', '/api/sync/check');
+
+    // if (await getLastSyncedPull() < resp.max_sync_id) {
+    //     log.info("There are some outstanding pulls, skipping content check.");
+    //
+    //     return;
+    // }
+
+    const localContentHash = await content_hash.getContentHash();
+
+    if (resp.content_hash === localContentHash) {
+        log.info("Content hash check passed with value: " + localContentHash);
+    }
+    else {
+        await event_log.addEvent("Content hash check failed. Local is " + localContentHash + ", remote is " + resp.content_hash);
+    }
+}
+
 async function syncRequest(syncContext, method, uri, body) {
     const fullUri = SYNC_SERVER + uri;
 
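
The decision logic in checkContentHash() reduces to three outcomes. The condensed restatement below is illustrative only (the function and its names are not part of the commit); it mirrors what the real code does: skip while local changes are still unpushed, log success on matching hashes, and record an event-log entry on a mismatch.

// Illustrative pure-function restatement of checkContentHash()'s outcomes.
function contentCheckOutcome(notPushedSyncs, localHash, remoteHash) {
    if (notPushedSyncs > 0) {
        return 'skip';   // don't compare while local changes are unpushed
    }

    return localHash === remoteHash ? 'ok' : 'mismatch';
}

console.log(contentCheckOutcome(3, 'abc', 'abc'));   // 'skip'
console.log(contentCheckOutcome(0, 'abc', 'abc'));   // 'ok'
console.log(contentCheckOutcome(0, 'abc', 'xyz'));   // 'mismatch'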

@@ -112,7 +112,7 @@ async function updateRecentNotes(entity, sourceId) {
         await sql.doInTransaction(async () => {
             await sql.replace('recent_notes', entity);
 
-            await sync_table.addRecentNoteSync(entity.note_id, sourceId);
+            await sync_table.addRecentNoteSync(entity.note_path, sourceId);
         });
     }
 }

@@ -66,6 +66,11 @@ function formatTwoTimestamps(origTS, newTS) {
     return "orig: " + formatDateTimeFromTS(origTS) + ", new: " + formatDateTimeFromTS(newTS);
 }
 
+function hash(text) {
+    return crypto.createHash('sha1').update(text).digest('base64');
+}
+
+
 module.exports = {
     randomSecureToken,
     randomString,
@@ -78,5 +83,6 @@ module.exports = {
     hmac,
     browserId,
     isElectron,
-    formatTwoTimestamps
+    formatTwoTimestamps,
+    hash
 };