sync fixes and refactorings

zadam 2023-07-29 21:59:20 +02:00
parent 2a7fe85020
commit 04b125afc0
14 changed files with 109 additions and 190 deletions

View File

@ -67,7 +67,6 @@ export default class TreeContextMenu {
{ title: "Advanced", uiIcon: "bx bx-empty", enabled: true, items: [
{ title: 'Expand subtree <kbd data-command="expandSubtree"></kbd>', command: "expandSubtree", uiIcon: "bx bx-expand", enabled: noSelectedNotes },
{ title: 'Collapse subtree <kbd data-command="collapseSubtree"></kbd>', command: "collapseSubtree", uiIcon: "bx bx-collapse", enabled: noSelectedNotes },
{ title: "Force note sync", command: "forceNoteSync", uiIcon: "bx bx-refresh", enabled: noSelectedNotes },
{ title: 'Sort by ... <kbd data-command="sortChildNotes"></kbd>', command: "sortChildNotes", uiIcon: "bx bx-empty", enabled: noSelectedNotes && notSearch },
{ title: 'Recent changes in subtree', command: "recentChangesInSubtree", uiIcon: "bx bx-history", enabled: noSelectedNotes },
{ title: 'Convert to attachment', command: "convertNoteToAttachment", uiIcon: "bx bx-empty", enabled: isNotRoot && !isHoisted }

View File

@ -18,13 +18,6 @@ async function syncNow(ignoreNotConfigured = false) {
}
}
async function forceNoteSync(noteId) {
await server.post(`sync/force-note-sync/${noteId}`);
toastService.showMessage("Note added to sync queue.");
}
export default {
syncNow,
forceNoteSync
syncNow
};

View File

@ -1564,10 +1564,6 @@ export default class NoteTreeWidget extends NoteContextAwareWidget {
this.triggerCommand("showImportDialog", {noteId: node.data.noteId});
}
forceNoteSyncCommand({node}) {
syncService.forceNoteSync(node.data.noteId);
}
editNoteTitleCommand({node}) {
appContext.triggerCommand('focusOnTitle');
}

View File

@ -3,7 +3,7 @@
const options = require('../../services/options');
const utils = require('../../services/utils');
const dateUtils = require('../../services/date_utils');
const instanceId = require('../../services/member_id');
const instanceId = require('../../services/instance_id');
const passwordEncryptionService = require('../../services/encryption/password_encryption');
const protectedSessionService = require('../../services/protected_session');
const appInfo = require('../../services/app_info');

View File

@ -9,10 +9,8 @@ const optionService = require('../../services/options');
const contentHashService = require('../../services/content_hash');
const log = require('../../services/log');
const syncOptions = require('../../services/sync_options');
const dateUtils = require('../../services/date_utils');
const utils = require('../../services/utils');
const ws = require('../../services/ws');
const becca = require("../../becca/becca");
async function testSync() {
try {
@ -84,54 +82,14 @@ function forceFullSync() {
syncService.sync();
}
function forceNoteSync(req) {
const noteId = req.params.noteId;
const note = becca.getNote(noteId);
const now = dateUtils.utcNowDateTime();
sql.execute(`UPDATE notes SET utcDateModified = ? WHERE noteId = ?`, [now, noteId]);
entityChangesService.moveEntityChangeToTop('notes', noteId);
sql.execute(`UPDATE blobs SET utcDateModified = ? WHERE blobId = ?`, [now, note.blobId]);
entityChangesService.moveEntityChangeToTop('blobs', note.blobId);
for (const branchId of sql.getColumn("SELECT branchId FROM branches WHERE noteId = ?", [noteId])) {
sql.execute(`UPDATE branches SET utcDateModified = ? WHERE branchId = ?`, [now, branchId]);
entityChangesService.moveEntityChangeToTop('branches', branchId);
}
for (const attributeId of sql.getColumn("SELECT attributeId FROM attributes WHERE noteId = ?", [noteId])) {
sql.execute(`UPDATE attributes SET utcDateModified = ? WHERE attributeId = ?`, [now, attributeId]);
entityChangesService.moveEntityChangeToTop('attributes', attributeId);
}
for (const revisionId of sql.getColumn("SELECT revisionId FROM revisions WHERE noteId = ?", [noteId])) {
sql.execute(`UPDATE revisions SET utcDateModified = ? WHERE revisionId = ?`, [now, revisionId]);
entityChangesService.moveEntityChangeToTop('revisions', revisionId);
}
for (const attachmentId of sql.getColumn("SELECT attachmentId FROM attachments WHERE noteId = ?", [noteId])) {
sql.execute(`UPDATE attachments SET utcDateModified = ? WHERE attachmentId = ?`, [now, attachmentId]);
entityChangesService.moveEntityChangeToTop('attachments', attachmentId);
}
log.info(`Forcing note sync for ${noteId}`);
// not awaiting for the job to finish (will probably take a long time)
syncService.sync();
}
function getChanged(req) {
const startTime = Date.now();
let lastEntityChangeId = parseInt(req.query.lastEntityChangeId);
const clientinstanceId = req.query.instanceId;
const clientInstanceId = req.query.instanceId;
let filteredEntityChanges = [];
while (filteredEntityChanges.length === 0) {
do {
const entityChanges = sql.getRows(`
SELECT *
FROM entity_changes
@ -144,20 +102,22 @@ function getChanged(req) {
break;
}
filteredEntityChanges = entityChanges.filter(ec => ec.instanceId !== clientinstanceId);
filteredEntityChanges = entityChanges.filter(ec => ec.instanceId !== clientInstanceId);
if (filteredEntityChanges.length === 0) {
lastEntityChangeId = entityChanges[entityChanges.length - 1].id;
}
}
} while (filteredEntityChanges.length === 0);
const entityChangeRecords = syncService.getEntityChangeRecords(filteredEntityChanges);
if (entityChangeRecords.length > 0) {
lastEntityChangeId = entityChangeRecords[entityChangeRecords.length - 1].entityChange.id;
log.info(`Returning ${entityChangeRecords.length} entity changes in ${Date.now() - startTime}ms`);
}
const ret = {
return {
entityChanges: entityChangeRecords,
lastEntityChangeId,
outstandingPullCount: sql.getValue(`
@ -165,14 +125,8 @@ function getChanged(req) {
FROM entity_changes
WHERE isSynced = 1
AND instanceId != ?
AND id > ?`, [clientinstanceId, lastEntityChangeId])
AND id > ?`, [clientInstanceId, lastEntityChangeId])
};
if (ret.entityChanges.length > 0) {
log.info(`Returning ${ret.entityChanges.length} entity changes in ${Date.now() - startTime}ms`);
}
return ret;
}
const partialRequests = {};
@ -194,12 +148,12 @@ function update(req) {
}
if (!partialRequests[requestId]) {
throw new Error(`Partial request ${requestId}, index ${pageIndex} of ${pageCount} of pages does not have expected record.`);
throw new Error(`Partial request ${requestId}, page ${pageIndex + 1} of ${pageCount} does not have the expected record.`);
}
partialRequests[requestId].payload += req.body;
log.info(`Receiving partial request ${requestId}, page index ${pageIndex} out of ${pageCount} pages.`);
log.info(`Receiving a partial request ${requestId}, page ${pageIndex + 1} out of ${pageCount} pages.`);
if (pageIndex !== pageCount - 1) {
return;
@ -212,9 +166,11 @@ function update(req) {
const {entities, instanceId} = body;
for (const {entityChange, entity} of entities) {
syncUpdateService.updateEntity(entityChange, entity, instanceId);
}
sql.transactional(() => {
for (const {entityChange, entity} of entities) {
syncUpdateService.updateEntity(entityChange, entity, instanceId);
}
});
}
setInterval(() => {
@ -241,8 +197,7 @@ function queueSector(req) {
}
function checkEntityChanges() {
const consistencyChecks = require("../../services/consistency_checks");
consistencyChecks.runEntityChangesChecks();
require("../../services/consistency_checks").runEntityChangesChecks();
}
module.exports = {
@ -251,7 +206,6 @@ module.exports = {
syncNow,
fillEntityChanges,
forceFullSync,
forceNoteSync,
getChanged,
update,
getStats,
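
Note: the getChanged handler above lends itself to a simple pull loop on the client side — keep requesting with the last seen entity change id until outstandingPullCount drops to zero. A minimal sketch, assuming a fetch-capable environment and a hypothetical applyEntityChange() helper (neither is part of this commit):

// hypothetical pull loop against /api/sync/changed (not part of this commit)
async function pullChanges(serverUrl, instanceId, startId = 0) {
    let lastEntityChangeId = startId;
    let outstandingPullCount;

    do {
        const resp = await fetch(`${serverUrl}/api/sync/changed?instanceId=${encodeURIComponent(instanceId)}&lastEntityChangeId=${lastEntityChangeId}`);
        const body = await resp.json();

        for (const {entityChange, entity} of body.entityChanges) {
            applyEntityChange(entityChange, entity); // hypothetical local apply
        }

        lastEntityChangeId = body.lastEntityChangeId;
        outstandingPullCount = body.outstandingPullCount;
    } while (outstandingPullCount > 0);

    return lastEntityChangeId;
}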

View File

@ -216,7 +216,6 @@ function register(app) {
apiRoute(PST, '/api/sync/now', syncApiRoute.syncNow);
apiRoute(PST, '/api/sync/fill-entity-changes', syncApiRoute.fillEntityChanges);
apiRoute(PST, '/api/sync/force-full-sync', syncApiRoute.forceFullSync);
apiRoute(PST, '/api/sync/force-note-sync/:noteId', syncApiRoute.forceNoteSync);
route(GET, '/api/sync/check', [auth.checkApiAuth], syncApiRoute.checkSync, apiResultHandler);
route(GET, '/api/sync/changed', [auth.checkApiAuth], syncApiRoute.getChanged, apiResultHandler);
route(PUT, '/api/sync/update', [auth.checkApiAuth], syncApiRoute.update, apiResultHandler);

View File

@ -597,14 +597,10 @@ class ConsistencyChecks {
runEntityChangeChecks(entityName, key) {
this.findAndFixIssues(`
SELECT
${key} as entityId
FROM
${entityName}
LEFT JOIN entity_changes ON entity_changes.entityName = '${entityName}'
AND entity_changes.entityId = ${key}
WHERE
entity_changes.id IS NULL`,
SELECT ${key} as entityId
FROM ${entityName}
LEFT JOIN entity_changes ec ON ec.entityName = '${entityName}' AND ec.entityId = ${entityName}.${key}
WHERE ec.id IS NULL`,
({entityId}) => {
const entityRow = sql.getRow(`SELECT * FROM ${entityName} WHERE ${key} = ?`, [entityId]);
@ -613,7 +609,7 @@ class ConsistencyChecks {
entityName,
entityId,
hash: utils.randomString(10), // doesn't matter, will force sync, but that's OK
isErased: !!entityRow.isErased,
isErased: false,
utcDateChanged: entityRow.utcDateModified || entityRow.utcDateCreated,
isSynced: entityName !== 'options' || entityRow.isSynced
});
@ -625,15 +621,13 @@ class ConsistencyChecks {
});
this.findAndFixIssues(`
SELECT
id, entityId
FROM
entity_changes
LEFT JOIN ${entityName} ON entityId = ${key}
SELECT id, entityId
FROM entity_changes
LEFT JOIN ${entityName} ON entityId = ${entityName}.${key}
WHERE
entity_changes.isErased = 0
AND entity_changes.entityName = '${entityName}'
AND ${key} IS NULL`,
AND ${entityName}.${key} IS NULL`,
({id, entityId}) => {
if (this.autoFix) {
sql.execute("DELETE FROM entity_changes WHERE entityName = ? AND entityId = ?", [entityName, entityId]);
@ -645,11 +639,9 @@ class ConsistencyChecks {
});
this.findAndFixIssues(`
SELECT
id, entityId
FROM
entity_changes
JOIN ${entityName} ON entityId = ${key}
SELECT id, entityId
FROM entity_changes
JOIN ${entityName} ON entityId = ${entityName}.${key}
WHERE
entity_changes.isErased = 1
AND entity_changes.entityName = '${entityName}'`,
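
For illustration, with entityName = 'notes' and key = 'noteId', the first of the templated checks above expands to roughly:

SELECT noteId as entityId
FROM notes
LEFT JOIN entity_changes ec ON ec.entityName = 'notes' AND ec.entityId = notes.noteId
WHERE ec.id IS NULL

i.e. it finds notes rows that have no corresponding entity_changes record.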

View File

@ -14,7 +14,8 @@ function getEntityHashes() {
const hashRows = sql.getRawRows(`
SELECT entityName,
entityId,
hash
hash,
isErased
FROM entity_changes
WHERE isSynced = 1
AND entityName != 'note_reordering'`);
@ -25,12 +26,17 @@ function getEntityHashes() {
const hashMap = {};
for (const [entityName, entityId, hash] of hashRows) {
for (const [entityName, entityId, hash, isErased] of hashRows) {
const entityHashMap = hashMap[entityName] = hashMap[entityName] || {};
const sector = entityId[0];
entityHashMap[sector] = (entityHashMap[sector] || "") + hash
if (entityName === 'revisions' && sector === '5') {
console.log(entityId, hash, isErased);
}
// if the entity is erased, its hash is not updated, so the isErased flag has to be factored into the sector hash separately
entityHashMap[sector] = (entityHashMap[sector] || "") + hash + isErased;
}
for (const entityHashMap of Object.values(hashMap)) {

View File

@ -3,7 +3,7 @@ const dateUtils = require('./date_utils');
const log = require('./log');
const cls = require('./cls');
const utils = require('./utils');
const instanceId = require('./member_id');
const instanceId = require('./instance_id');
const becca = require("../becca/becca");
const blobService = require("../services/blob");
@ -62,8 +62,6 @@ function moveEntityChangeToTop(entityName, entityId) {
}
function addEntityChangesForSector(entityName, sector) {
const startTime = Date.now();
const entityChanges = sql.getRows(`SELECT * FROM entity_changes WHERE entityName = ? AND SUBSTR(entityId, 1, 1) = ?`, [entityName, sector]);
sql.transactional(() => {
@ -72,7 +70,7 @@ function addEntityChangesForSector(entityName, sector) {
}
});
log.info(`Added sector ${sector} of '${entityName}' (${entityChanges.length} entities) to sync queue in ${Date.now() - startTime}ms.`);
log.info(`Added sector ${sector} of '${entityName}' (${entityChanges.length} entities) to the sync queue.`);
}
function cleanupEntityChangesForMissingEntities(entityName, entityPrimaryKey) {
@ -103,39 +101,34 @@ function fillEntityChanges(entityName, entityPrimaryKey, condition = '') {
createdCount++;
let hash;
let utcDateChanged;
let isSynced;
const ec = {
entityName,
entityId,
isErased: false
};
if (entityName === 'blobs') {
const blob = sql.getRow("SELECT blobId, content, utcDateModified FROM blobs WHERE blobId = ?", [entityId]);
hash = blobService.calculateContentHash(blob);
utcDateChanged = blob.utcDateModified;
isSynced = true; // blobs are always synced
ec.hash = blobService.calculateContentHash(blob);
ec.utcDateChanged = blob.utcDateModified;
ec.isSynced = true; // blobs are always synced
} else {
const entity = becca.getEntity(entityName, entityId);
if (entity) {
hash = entity?.generateHash() || "|deleted";
utcDateChanged = entity?.getUtcDateChanged() || dateUtils.utcNowDateTime();
isSynced = entityName !== 'options' || !!entity?.isSynced;
ec.hash = entity.generateHash() || "|deleted";
ec.utcDateChanged = entity.getUtcDateChanged() || dateUtils.utcNowDateTime();
ec.isSynced = entityName !== 'options' || !!entity.isSynced;
} else {
// entity might be null (not present in becca) when it's deleted
// FIXME: hacky, not sure if it might cause some problems
hash = "deleted";
utcDateChanged = dateUtils.utcNowDateTime();
isSynced = true; // deletable (the ones with isDeleted) entities are synced
ec.hash = "deleted";
ec.utcDateChanged = dateUtils.utcNowDateTime();
ec.isSynced = true; // deletable (the ones with isDeleted) entities are synced
}
}
addEntityChange({
entityName,
entityId,
hash: hash,
isErased: false,
utcDateChanged: utcDateChanged,
isSynced: isSynced
});
addEntityChange(ec);
}
if (createdCount > 0) {

View File

@ -37,6 +37,7 @@ function eraseNotes(noteIdsToErase) {
function setEntityChangesAsErased(entityChanges) {
for (const ec of entityChanges) {
ec.isErased = true;
ec.utcDateChanged = dateUtils.utcNowDateTime();
entityChangesService.addEntityChange(ec);
}

View File

@ -3,6 +3,7 @@
const log = require('./log');
const sql = require('./sql');
const protectedSessionService = require("./protected_session");
const dateUtils = require("./date_utils");
/**
* @param {BNote} note
@ -40,7 +41,7 @@ function eraseRevisions(revisionIdsToErase) {
log.info(`Removing note revisions: ${JSON.stringify(revisionIdsToErase)}`);
sql.executeMany(`DELETE FROM revisions WHERE revisionId IN (???)`, revisionIdsToErase);
sql.executeMany(`UPDATE entity_changes SET isErased = 1 WHERE entityName = 'revisions' AND entityId IN (???)`, revisionIdsToErase);
sql.executeMany(`UPDATE entity_changes SET isErased = 1, utcDateChanged = '${dateUtils.utcNowDateTime()}' WHERE entityName = 'revisions' AND entityId IN (???)`, revisionIdsToErase);
}
module.exports = {

View File

@ -4,7 +4,7 @@ const log = require('./log');
const sql = require('./sql');
const optionService = require('./options');
const utils = require('./utils');
const instanceId = require('./member_id');
const instanceId = require('./instance_id');
const dateUtils = require('./date_utils');
const syncUpdateService = require('./sync_update');
const contentHashService = require('./content_hash');
@ -54,6 +54,7 @@ async function sync() {
});
}
catch (e) {
// we're dynamically switching between using the proxy and not, based on whether we encountered an error with the current method
proxyToggle = !proxyToggle;
if (e.message?.includes('ECONNREFUSED') ||
@ -107,7 +108,7 @@ async function doLogin() {
});
if (resp.instanceId === instanceId) {
throw new Error(`Sync server has member ID '${resp.instanceId}' which is also local. This usually happens when the sync client is (mis)configured to sync with itself (URL points back to client) instead of the correct sync server.`);
throw new Error(`Sync server has instance ID '${resp.instanceId}' which is also local. This usually happens when the sync client is (mis)configured to sync with itself (URL points back to client) instead of the correct sync server.`);
}
syncContext.instanceId = resp.instanceId;
@ -253,7 +254,7 @@ async function checkContentHash(syncContext) {
const failedChecks = contentHashService.checkContentHashes(resp.entityHashes);
if (failedChecks.length > 0) {
// before requeuing sectors, make sure the entity changes are correct
// before re-queuing sectors, make sure the entity changes are correct
const consistencyChecks = require("./consistency_checks");
consistencyChecks.runEntityChangesChecks();
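
The entityHashes compared here are the per-sector maps built in content_hash.js above (which now fold in isErased). A rough sketch of what such a comparison could look like, assuming both sides produce the same {entityName: {sector: hash}} shape — the real checkContentHashes is not shown in this diff:

// hypothetical sector-hash comparison (the actual checkContentHashes is not part of this diff)
function findFailedSectors(localHashes, remoteHashes) {
    const failed = [];

    for (const entityName of Object.keys(localHashes)) {
        const localSectors = localHashes[entityName] || {};
        const remoteSectors = remoteHashes[entityName] || {};
        const sectors = new Set([...Object.keys(localSectors), ...Object.keys(remoteSectors)]);

        for (const sector of sectors) {
            if (localSectors[sector] !== remoteSectors[sector]) {
                failed.push({entityName, sector}); // this sector would be re-queued for sync
            }
        }
    }

    return failed;
}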
@ -350,7 +351,8 @@ function getEntityChangeRecords(entityChanges) {
length += JSON.stringify(record).length;
if (length > 1000000) {
if (length > 1_000_000) {
// each sync request/response should be at most ~1 MB.
break;
}
}
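
If a pushed payload exceeds a cap like this, it would presumably be split into pages and reassembled by the partial-request handling in the update() route shown earlier. A rough sketch of such a splitter — the helper name and exact page size are assumptions, not taken from this commit:

// hypothetical splitter for oversized sync payloads (not part of this commit)
function splitIntoPages(payload, maxPageSize = 1_000_000) {
    const pages = [];

    for (let i = 0; i < payload.length; i += maxPageSize) {
        pages.push(payload.substring(i, i + maxPageSize));
    }

    return pages;
}

// each page would then be sent with the same requestId plus its pageIndex/pageCount,
// so that the receiving update() handler can concatenate the pieces back together
const pages = splitIntoPages(JSON.stringify({instanceId: 'local', entities: []}));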

View File

@ -4,98 +4,83 @@ const entityChangesService = require('./entity_changes');
const eventService = require('./events');
const entityConstructor = require("../becca/entity_constructor");
function updateEntity(entityChange, entityRow, instanceId) {
// can be undefined for options with isSynced=false
if (!entityRow) {
if (entityChange.isSynced) {
if (entityChange.isErased) {
eraseEntity(entityChange, instanceId);
}
else {
log.info(`Encountered synced non-erased entity change without entity: ${JSON.stringify(entityChange)}`);
}
}
else if (entityChange.entityName !== 'options') {
log.info(`Encountered unsynced non-option entity change without entity: ${JSON.stringify(entityChange)}`);
}
return;
function updateEntity(remoteEC, remoteEntityRow, instanceId) {
if (!remoteEntityRow && remoteEC.entityName === 'options') {
return; // can be undefined for options with isSynced=false
}
const updated = entityChange.entityName === 'note_reordering'
? updateNoteReordering(entityChange, entityRow, instanceId)
: updateNormalEntity(entityChange, entityRow, instanceId);
const updated = remoteEC.entityName === 'note_reordering'
? updateNoteReordering(remoteEC, remoteEntityRow, instanceId)
: updateNormalEntity(remoteEC, remoteEntityRow, instanceId);
if (updated) {
if (entityRow.isDeleted) {
if (remoteEntityRow?.isDeleted) {
eventService.emit(eventService.ENTITY_DELETE_SYNCED, {
entityName: entityChange.entityName,
entityId: entityChange.entityId
entityName: remoteEC.entityName,
entityId: remoteEC.entityId
});
}
else if (!entityChange.isErased) {
else if (!remoteEC.isErased) {
eventService.emit(eventService.ENTITY_CHANGE_SYNCED, {
entityName: entityChange.entityName,
entityRow
entityName: remoteEC.entityName,
entityRow: remoteEntityRow
});
}
}
}
function updateNormalEntity(remoteEntityChange, remoteEntityRow, instanceId) {
const localEntityChange = sql.getRow(`
SELECT utcDateChanged, hash, isErased
FROM entity_changes
WHERE entityName = ? AND entityId = ?`, [remoteEntityChange.entityName, remoteEntityChange.entityId]);
function updateNormalEntity(remoteEC, remoteEntityRow, instanceId) {
const localEC = sql.getRow(`SELECT * FROM entity_changes WHERE entityName = ? AND entityId = ?`, [remoteEC.entityName, remoteEC.entityId]);
if (localEntityChange && !localEntityChange.isErased && remoteEntityChange.isErased) {
sql.transactional(() => {
const primaryKey = entityConstructor.getEntityFromEntityName(remoteEntityChange.entityName).primaryKeyName;
sql.execute(`DELETE FROM ${remoteEntityChange.entityName} WHERE ${primaryKey} = ?`, remoteEntityChange.entityId);
entityChangesService.addEntityChangeWithInstanceId(remoteEntityChange, instanceId);
});
if (!localEC?.isErased && remoteEC.isErased) {
eraseEntity(remoteEC, instanceId);
return true;
} else if (localEC?.isErased && !remoteEC.isErased) {
// on this side, we can't unerase the entity, so force the entity to be erased on the other side.
entityChangesService.addEntityChangeWithInstanceId(localEC, null);
return false;
}
if (!localEntityChange
|| localEntityChange.utcDateChanged < remoteEntityChange.utcDateChanged
|| localEntityChange.hash !== remoteEntityChange.hash // sync error, we should still update
if (!localEC
|| localEC.utcDateChanged < remoteEC.utcDateChanged
|| (localEC.utcDateChanged === remoteEC.utcDateChanged && localEC.hash !== remoteEC.hash) // sync error, we should still update
) {
if (remoteEntityChange.entityName === 'blobs') {
if (remoteEC.entityName === 'blobs' && remoteEntityRow.content !== null) {
// we always use a Buffer object which is different from normal saving - there we use a simple string type for
// "string notes". The problem is that in general, it's not possible to detect whether a blob content
// is a string note or not (syncs can arrive out of order)
remoteEntityRow.content = remoteEntityRow.content === null ? null : Buffer.from(remoteEntityRow.content, 'base64');
remoteEntityRow.content = Buffer.from(remoteEntityRow.content, 'base64');
if (remoteEntityRow.content?.byteLength === 0) {
if (remoteEntityRow.content.byteLength === 0) {
// there seems to be a bug which causes an empty buffer to be stored as NULL, which is then picked up as an inconsistency
// (possibly not a problem anymore with the newer better-sqlite3)
remoteEntityRow.content = "";
}
}
sql.transactional(() => {
sql.replace(remoteEntityChange.entityName, remoteEntityRow);
sql.replace(remoteEC.entityName, remoteEntityRow);
entityChangesService.addEntityChangeWithInstanceId(remoteEntityChange, instanceId);
});
entityChangesService.addEntityChangeWithInstanceId(remoteEC, instanceId);
return true;
} else if (localEC.hash !== remoteEC.hash && localEC.utcDateChanged > remoteEC.utcDateChanged) {
// the change on our side is newer than on the other side, so the other side should update
entityChangesService.addEntityChangeWithInstanceId(localEC, null);
return false;
}
return false;
}
function updateNoteReordering(entityChange, entity, instanceId) {
sql.transactional(() => {
for (const key in entity) {
sql.execute("UPDATE branches SET notePosition = ? WHERE branchId = ?", [entity[key], key]);
}
function updateNoteReordering(remoteEC, remoteEntityRow, instanceId) {
for (const key in remoteEntityRow) {
sql.execute("UPDATE branches SET notePosition = ? WHERE branchId = ?", [remoteEntityRow[key], key]);
}
entityChangesService.addEntityChangeWithInstanceId(entityChange, instanceId);
});
entityChangesService.addEntityChangeWithInstanceId(remoteEC, instanceId);
return true;
}
@ -109,19 +94,17 @@ function eraseEntity(entityChange, instanceId) {
"attributes",
"revisions",
"attachments",
"blobs",
"blobs"
];
if (!entityNames.includes(entityName)) {
log.error(`Cannot erase entity '${entityName}', id '${entityId}'`);
log.error(`Cannot erase entity '${entityName}', id '${entityId}'.`);
return;
}
const keyName = entityConstructor.getEntityFromEntityName(entityName).primaryKeyName;
const primaryKeyName = entityConstructor.getEntityFromEntityName(entityName).primaryKeyName;
sql.execute(`DELETE FROM ${entityName} WHERE ${keyName} = ?`, [entityId]);
eventService.emit(eventService.ENTITY_DELETE_SYNCED, { entityName, entityId });
sql.execute(`DELETE FROM ${entityName} WHERE ${primaryKeyName} = ?`, [entityId]);
entityChangesService.addEntityChangeWithInstanceId(entityChange, instanceId);
}
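
To summarise the resolution rules in updateNormalEntity above, here is the same decision expressed as a standalone sketch (the function name and return values are illustrative only; the rules mirror the code in this commit):

// hedged summary of the conflict resolution above (names are illustrative only)
function resolveConflict(localEC, remoteEC) {
    if (!localEC?.isErased && remoteEC.isErased) {
        return 'erase-locally';      // erasure from the remote side always wins
    }

    if (localEC?.isErased && !remoteEC.isErased) {
        return 'push-local-erasure'; // we can't unerase, so the other side must erase too
    }

    if (!localEC
        || localEC.utcDateChanged < remoteEC.utcDateChanged
        || (localEC.utcDateChanged === remoteEC.utcDateChanged && localEC.hash !== remoteEC.hash)) {
        return 'apply-remote';       // remote change is newer (or a hash mismatch at the same timestamp)
    }

    if (localEC.hash !== remoteEC.hash && localEC.utcDateChanged > remoteEC.utcDateChanged) {
        return 'push-local';         // our change is newer, the other side should update instead
    }

    return 'no-op';                  // both sides already agree
}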