implemented sync hash check recovery process

zadam 2019-12-18 22:58:30 +01:00
parent 77311954a1
commit d47403c0e7
6 changed files with 72 additions and 21 deletions

View File

@@ -10,6 +10,8 @@ const contentHashService = require('../../services/content_hash');
 const log = require('../../services/log');
 const syncOptions = require('../../services/sync_options');
 const dateUtils = require('../../services/date_utils');
+const entityConstructor = require('../../entities/entity_constructor');
+const utils = require('../../services/utils');
 
 async function testSync() {
     try {
@@ -47,7 +49,7 @@ async function getStats() {
 
 async function checkSync() {
     return {
-        hashes: await contentHashService.getHashes(),
+        entityHashes: await contentHashService.getEntityHashes(),
         maxSyncId: await sql.getValue('SELECT MAX(id) FROM sync')
     };
 }
@@ -137,6 +139,15 @@ async function syncFinished() {
     await sqlInit.dbInitialized();
 }
 
+async function queueSector(req) {
+    const entityName = utils.sanitizeSqlIdentifier(req.params.entityName);
+    const sector = utils.sanitizeSqlIdentifier(req.params.sector);
+
+    const entityPrimaryKey = entityConstructor.getEntityFromEntityName(entityName).primaryKeyName;
+
+    await syncTableService.addEntitySyncsForSector(entityName, entityPrimaryKey, sector);
+}
+
 module.exports = {
     testSync,
     checkSync,
@@ -147,5 +158,6 @@ module.exports = {
     getChanged,
     update,
     getStats,
-    syncFinished
+    syncFinished,
+    queueSector
 };
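For orientation, here is a self-contained sketch of what the new queueSector handler effectively does for one hypothetical request; the entity name 'notes', sector 'a' and primary key 'noteId' are illustrative assumptions, not values taken from this diff, and the real handler delegates to utils, entity_constructor and sync_table:

// Sketch only: models queueSector() for POST /api/sync/queue-sector/notes/a
function sanitizeSqlIdentifier(str) {
    // mirrors the helper added in utils.js further down in this commit
    return str.replace(/[^A-Za-z0-9_]/g, "");
}

async function queueSectorSketch(rawEntityName, rawSector) {
    const entityName = sanitizeSqlIdentifier(rawEntityName); // 'notes'
    const sector = sanitizeSqlIdentifier(rawSector);         // 'a'

    // assumption for illustration: the 'notes' entity maps to primary key 'noteId'
    const entityPrimaryKey = 'noteId';

    // the real code calls syncTableService.addEntitySyncsForSector(...) here,
    // re-queuing every row whose primary key starts with the sector character
    console.log(`re-queue ${entityName} rows where ${entityPrimaryKey} starts with '${sector}'`);
}

queueSectorSketch('notes', 'a');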

View File

@@ -199,6 +199,7 @@ function register(app) {
     route(GET, '/api/sync/changed', [auth.checkApiAuth], syncApiRoute.getChanged, apiResultHandler);
     route(PUT, '/api/sync/update', [auth.checkApiAuth], syncApiRoute.update, apiResultHandler);
     route(POST, '/api/sync/finished', [auth.checkApiAuth], syncApiRoute.syncFinished, apiResultHandler);
+    route(POST, '/api/sync/queue-sector/:entityName/:sector', [auth.checkApiAuth], syncApiRoute.queueSector, apiResultHandler);
     route(GET, '/api/sync/stats', [], syncApiRoute.getStats, apiResultHandler);
 
     apiRoute(POST, '/api/recent-notes', recentNotesRoute.addRecentNote);

View File

@@ -3,7 +3,6 @@
 const sql = require('./sql');
 const utils = require('./utils');
 const log = require('./log');
-const ws = require('./ws.js');
 const ApiToken = require('../entities/api_token');
 const Branch = require('../entities/branch');
 const Note = require('../entities/note');
@@ -70,9 +69,11 @@ async function checkContentHashes(otherHashes) {
     if (failedChecks.length === 0) {
         log.info("Content hash checks PASSED");
     }
+
+    return failedChecks;
 }
 
 module.exports = {
-    getHashes: getEntityHashes,
+    getEntityHashes,
     checkContentHashes
 };
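The notable change in this file is that checkContentHashes() now returns the failing (entityName, sector) pairs instead of only logging them. A minimal sketch of the shape the caller in the sync service relies on, with made-up entries:

// Hypothetical result of checkContentHashes() after a mismatch in two sectors
const failedChecks = [
    {entityName: 'notes', sector: 'a'},
    {entityName: 'branches', sector: '7'}
];

// sync.js destructures exactly these two properties per failed sector
for (const {entityName, sector} of failedChecks) {
    console.log(`sector '${sector}' of '${entityName}' needs to be re-synced`);
}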

View File

@@ -15,6 +15,8 @@ const syncMutexService = require('./sync_mutex');
 const cls = require('./cls');
 const request = require('./request');
 const ws = require('./ws');
+const syncTableService = require('./sync_table');
+const entityConstructor = require('../entities/entity_constructor');
 
 let proxyToggle = true;
@@ -30,17 +32,22 @@ async function sync() {
             return { success: false, message: 'Sync not configured' };
         }
 
-        const syncContext = await login();
+        let continueSync = false;
 
-        await pushSync(syncContext);
+        do {
+            const syncContext = await login();
 
-        await pullSync(syncContext);
+            await pushSync(syncContext);
 
-        await pushSync(syncContext);
+            await pullSync(syncContext);
 
-        await syncFinished(syncContext);
+            await pushSync(syncContext);
 
-        await checkContentHash(syncContext);
+            await syncFinished(syncContext);
+
+            continueSync = await checkContentHash(syncContext);
+        }
+        while (continueSync);
 
         return {
             success: true
@@ -225,7 +232,7 @@ async function checkContentHash(syncContext) {
     if (await getLastSyncedPull() < resp.maxSyncId) {
         log.info("There are some outstanding pulls, skipping content check.");
 
-        return;
+        return true;
     }
 
     const notPushedSyncs = await sql.getValue("SELECT EXISTS(SELECT 1 FROM sync WHERE id > ?)", [await getLastSyncedPush()]);
@@ -233,10 +240,20 @@ async function checkContentHash(syncContext) {
     if (notPushedSyncs) {
         log.info(`There's ${notPushedSyncs} outstanding pushes, skipping content check.`);
 
-        return;
+        return true;
     }
 
-    await contentHashService.checkContentHashes(resp.hashes);
+    const failedChecks = await contentHashService.checkContentHashes(resp.entityHashes);
+
+    for (const {entityName, sector} of failedChecks) {
+        const entityPrimaryKey = entityConstructor.getEntityFromEntityName(entityName).primaryKeyName;
+
+        await syncTableService.addEntitySyncsForSector(entityName, entityPrimaryKey, sector);
+
+        await syncRequest(syncContext, 'POST', `/api/sync/queue-sector/${entityName}/${sector}`);
+    }
+
+    return failedChecks.length > 0;
 }
 
 async function syncRequest(syncContext, method, requestPath, body) {
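Taken together, checkContentHash() now doubles as the loop condition for sync(): it returns true both when the check had to be skipped (outstanding pulls or pushes) and when failed sectors were re-queued, so the push/pull cycle repeats until a pass comes back clean. A condensed, self-contained model of that control flow, with stubbed helpers for illustration only:

// stand-ins for the real push/pull round and content hash check
async function runSyncRound() { /* push, pull, push, syncFinished */ }
async function anotherRoundNeeded() { return false; }

async function syncUntilConsistent() {
    let continueSync = false;

    do {
        await runSyncRound();
        continueSync = await anotherRoundNeeded();
    } while (continueSync);
}

syncUntilConsistent();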

View File

@@ -6,7 +6,7 @@ const cls = require('./cls');
 
 let syncs = [];
 
-async function addEntitySync(entityName, entityId, sourceId) {
+async function insertEntitySync(entityName, entityId, sourceId) {
     const sync = {
         entityName: entityName,
         entityId: entityId,
@@ -16,11 +16,25 @@ async function addEntitySync(entityName, entityId, sourceId) {
     sync.id = await sql.replace("sync", sync);
 
+    return sync;
+}
+
+async function addEntitySync(entityName, entityId, sourceId) {
+    const sync = await insertEntitySync(entityName, entityId, sourceId);
+
     syncs.push(sync);
 
     setTimeout(() => require('./ws').sendPingToAllClients(), 50);
 }
 
+async function addEntitySyncsForSector(entityName, entityPrimaryKey, sector) {
+    const entityIds = await sql.getColumn(`SELECT ${entityPrimaryKey} FROM ${entityName} WHERE SUBSTR(${entityPrimaryKey}, 1, 1) = ?`, [sector]);
+
+    for (const entityId of entityIds) {
+        await insertEntitySync(entityName, entityId, 'content-check');
+    }
+}
+
 function getMaxSyncId() {
     return syncs.length === 0 ? 0 : syncs[syncs.length - 1].id;
 }
@@ -29,19 +43,19 @@ function getEntitySyncsNewerThan(syncId) {
     return syncs.filter(s => s.id > syncId);
 }
 
-async function cleanupSyncRowsForMissingEntities(entityName, entityKey) {
+async function cleanupSyncRowsForMissingEntities(entityName, entityPrimaryKey) {
     await sql.execute(`
       DELETE
       FROM sync
       WHERE sync.entityName = '${entityName}'
-        AND sync.entityId NOT IN (SELECT ${entityKey} FROM ${entityName})`);
+        AND sync.entityId NOT IN (SELECT ${entityPrimaryKey} FROM ${entityName})`);
 }
 
-async function fillSyncRows(entityName, entityKey, condition = '') {
+async function fillSyncRows(entityName, entityPrimaryKey, condition = '') {
     try {
-        await cleanupSyncRowsForMissingEntities(entityName, entityKey);
+        await cleanupSyncRowsForMissingEntities(entityName, entityPrimaryKey);
 
-        const entityIds = await sql.getColumn(`SELECT ${entityKey} FROM ${entityName}`
+        const entityIds = await sql.getColumn(`SELECT ${entityPrimaryKey} FROM ${entityName}`
             + (condition ? ` WHERE ${condition}` : ''));
 
         let createdCount = 0;
@@ -69,7 +83,7 @@ async function fillSyncRows(entityName, entityKey, condition = '') {
     catch (e) {
         // this is to fix migration from 0.30 to 0.32, can be removed later
        // see https://github.com/zadam/trilium/issues/557
-        log.error(`Filling sync rows failed for ${entityName} ${entityKey} with error "${e.message}", continuing`);
+        log.error(`Filling sync rows failed for ${entityName} ${entityPrimaryKey} with error "${e.message}", continuing`);
     }
 }
@@ -101,5 +115,6 @@ module.exports = {
     addEntitySync,
     fillAllSyncRows,
     getEntitySyncsNewerThan,
-    getMaxSyncId
+    getMaxSyncId,
+    addEntitySyncsForSector
 };
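A "sector" in this scheme is simply the first character of an entity's primary key, so re-queuing a failed sector touches only that slice of rows. A tiny self-contained illustration with made-up IDs:

// addEntitySyncsForSector('notes', 'noteId', 'a') selects IDs like these...
const noteIds = ['aX3kf9', 'a9QmZl', 'Zt1cUo'];

const sector = 'a';
const idsInSector = noteIds.filter(id => id.startsWith(sector));

// ...and inserts one sync row per matching ID with sourceId 'content-check'
console.log(idsInSector); // [ 'aX3kf9', 'a9QmZl' ]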

View File

@@ -53,6 +53,10 @@ function sanitizeSql(str) {
     return str.replace(/'/g, "''");
 }
 
+function sanitizeSqlIdentifier(str) {
+    return str.replace(/[^A-Za-z0-9_]/g, "");
+}
+
 function prepareSqlForLike(prefix, str, suffix) {
     const value = str
         .replace(/\\/g, "\\\\")
@@ -174,6 +178,7 @@ module.exports = {
     hash,
     isEmptyOrWhitespace,
     sanitizeSql,
+    sanitizeSqlIdentifier,
     prepareSqlForLike,
     stopWatch,
     escapeHtml,
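Since the entity name and sector are interpolated straight into SQL by addEntitySyncsForSector, the new sanitizer strips every character that is not in [A-Za-z0-9_] before the value reaches a query. A quick check of its behaviour with sample inputs (the inputs are made up):

function sanitizeSqlIdentifier(str) {
    return str.replace(/[^A-Za-z0-9_]/g, "");
}

console.log(sanitizeSqlIdentifier("notes"));            // notes
console.log(sanitizeSqlIdentifier("notes; DROP --x"));  // notesDROPx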