sync content check refactoring

This commit is contained in:
azivner 2018-04-07 22:59:47 -04:00
parent 36b15f474d
commit 982796255d
2 changed files with 35 additions and 40 deletions

View File

@ -1,6 +1,10 @@
"use strict";
const sql = require('./sql'); const sql = require('./sql');
const utils = require('./utils'); const utils = require('./utils');
const log = require('./log'); const log = require('./log');
const eventLogService = require('./event_log');
const messagingService = require('./messaging');
function getHash(rows) { function getHash(rows) {
let hash = ''; let hash = '';
@ -121,6 +125,29 @@ async function getHashes() {
return hashes; return hashes;
} }
/**
 * Compares local entity-table content hashes against hashes received from the
 * sync partner and reports any mismatch.
 *
 * @param {Object} otherHashes - map of entity name -> content hash computed by the remote side
 * @returns {Promise<void>}
 */
async function checkContentHashes(otherHashes) {
    const hashes = await getHashes();
    let allChecksPassed = true;

    for (const key in hashes) {
        if (hashes[key] !== otherHashes[key]) {
            allChecksPassed = false;

            // FIX: original referenced `resp.hashes[key]`, but `resp` does not exist in
            // this function (leftover from the pre-refactor code in sync.js, where the
            // hashes came from `resp`). The remote hash here is `otherHashes[key]`.
            await eventLogService.addEvent(`Content hash check for ${key} FAILED. Local is ${hashes[key]}, remote is ${otherHashes[key]}`);

            if (key !== 'recent_notes') {
                // let's not get alarmed about recent notes which get updated often and can cause failures in race conditions
                await messagingService.sendMessageToAllClients({type: 'sync-hash-check-failed'});
            }
        }
    }

    if (allChecksPassed) {
        log.info("Content hash checks PASSED");
    }
}
module.exports = { module.exports = {
getHashes getHashes,
checkContentHashes
}; };

View File

@ -10,10 +10,8 @@ const sourceIdService = require('./source_id');
const dateUtils = require('./date_utils'); const dateUtils = require('./date_utils');
const syncUpdateService = require('./sync_update'); const syncUpdateService = require('./sync_update');
const contentHashService = require('./content_hash'); const contentHashService = require('./content_hash');
const eventLogService = require('./event_log');
const fs = require('fs'); const fs = require('fs');
const appInfo = require('./app_info'); const appInfo = require('./app_info');
const messagingService = require('./messaging');
const syncSetup = require('./sync_setup'); const syncSetup = require('./sync_setup');
const syncMutexService = require('./sync_mutex'); const syncMutexService = require('./sync_mutex');
const cls = require('./cls'); const cls = require('./cls');
@ -183,12 +181,6 @@ async function pushSync(syncContext) {
} }
} }
function serializeNoteContentBuffer(note) {
if (note.type === 'file') {
note.content = note.content.toString("binary");
}
}
async function checkContentHash(syncContext) { async function checkContentHash(syncContext) {
const resp = await syncRequest(syncContext, 'GET', '/api/sync/check'); const resp = await syncRequest(syncContext, 'GET', '/api/sync/check');
@ -207,25 +199,7 @@ async function checkContentHash(syncContext) {
return; return;
} }
const hashes = await contentHashService.getHashes(); await contentHashService.checkContentHashes(resp.hashes);
let allChecksPassed = true;
for (const key in hashes) {
if (hashes[key] !== resp.hashes[key]) {
allChecksPassed = false;
await eventLogService.addEvent(`Content hash check for ${key} FAILED. Local is ${hashes[key]}, remote is ${resp.hashes[key]}`);
if (key !== 'recent_notes') {
// let's not get alarmed about recent notes which get updated often and can cause failures in race conditions
await messagingService.sendMessageToAllClients({type: 'sync-hash-check-failed'});
}
}
}
if (allChecksPassed) {
log.info("Content hash checks PASSED");
}
} }
async function syncRequest(syncContext, method, uri, body) { async function syncRequest(syncContext, method, uri, body) {
@ -270,7 +244,7 @@ const primaryKeys = {
async function getEntityRow(entityName, entityId) { async function getEntityRow(entityName, entityId) {
if (entityName === 'note_reordering') { if (entityName === 'note_reordering') {
return await getNoteReordering(entityId); return await sql.getMap("SELECT branchId, notePosition FROM branches WHERE parentNoteId = ? AND isDeleted = 0", [entityId]);
} }
else { else {
const primaryKey = primaryKeys[entityName]; const primaryKey = primaryKeys[entityName];
@ -279,16 +253,16 @@ async function getEntityRow(entityName, entityId) {
throw new Error("Unknown entity " + entityName); throw new Error("Unknown entity " + entityName);
} }
const entityRow = await sql.getRow(`SELECT * FROM ${entityName} WHERE ${primaryKey} = ?`, [entityId]); const entity = await sql.getRow(`SELECT * FROM ${entityName} WHERE ${primaryKey} = ?`, [entityId]);
if (entityName === 'notes') { if (entityName === 'notes' && entity.type === 'file') {
serializeNoteContentBuffer(entityRow); entity.content = entity.content.toString("binary");
} }
else if (entityName === 'images') { else if (entityName === 'images') {
entityRow.data = entityRow.data.toString('base64'); entity.data = entity.data.toString('base64');
} }
return entityRow; return entity;
} }
} }
@ -314,10 +288,6 @@ async function getSyncRecords(syncs) {
return records; return records;
} }
async function getNoteReordering(parentNoteId) {
return await sql.getMap("SELECT branchId, notePosition FROM branches WHERE parentNoteId = ? AND isDeleted = 0", [parentNoteId])
}
sqlInit.dbReady.then(() => { sqlInit.dbReady.then(() => {
if (syncSetup.isSyncSetup) { if (syncSetup.isSyncSetup) {
log.info("Setting up sync to " + syncSetup.SYNC_SERVER + " with timeout " + syncSetup.SYNC_TIMEOUT); log.info("Setting up sync to " + syncSetup.SYNC_SERVER + " with timeout " + syncSetup.SYNC_TIMEOUT);
@ -344,7 +314,5 @@ sqlInit.dbReady.then(() => {
module.exports = { module.exports = {
sync, sync,
serializeNoteContentBuffer,
getEntityRow,
getSyncRecords getSyncRecords
}; };