Mirror of https://github.com/zadam/trilium.git
script can wait until the sync data has been applied
commit 2a5ab3a5e1
parent 358fd13c8d
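
In short: the script-exec API route now also returns the id of the newest sync row produced by the script, and the frontend script API blocks on that id (via the new ws.waitForSyncId) before handing the result back to the calling script, so the script immediately sees its own server-side changes in the frontend. A rough client-side sketch of the round trip, assembled from the hunks below; the `server` HTTP helper name is a placeholder, not a module shown in this commit:

    // minimal sketch of the new flow (placeholder HTTP helper `server`, ws module as imported below)
    async function execScriptAndWait(payload) {
        // the exec route below now returns { success, executionResult, maxSyncId }
        const ret = await server.post('script/exec', payload);

        if (!ret.success) {
            throw new Error(ret.error);
        }

        // block until the websocket sync consumer has applied all rows up to maxSyncId,
        // so local caches already reflect whatever the script wrote on the server
        await ws.waitForSyncId(ret.maxSyncId);

        return ret.executionResult;
    }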
@@ -9,6 +9,7 @@ import noteTooltipService from './note_tooltip.js';
import protectedSessionService from './protected_session.js';
import dateNotesService from './date_notes.js';
import StandardWidget from '../widgets/standard_widget.js';
import ws from "./ws.js";

/**
 * This is the main frontend API interface for scripts. It's published in the local "api" object.
@@ -147,6 +148,9 @@ function FrontendScriptApi(startNote, currentNote, originEntity = null, tabContext
        });

        if (ret.success) {
            // wait until all the changes done in the script have been synced to the frontend before continuing
            await ws.waitForSyncId(ret.maxSyncId);

            return ret.executionResult;
        }
        else {
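
From a script author's perspective this means a server-side call can be followed immediately by reads of the data it produced. A hypothetical user script; api.runOnServer is assumed here as the helper that wraps the exec call above and is not itself part of this diff:

    // hypothetical frontend script - api.runOnServer is an assumption, not shown in this commit
    const result = await api.runOnServer(() => {
        // ... server-side code creating or modifying notes, returning a value ...
    });

    // by the time runOnServer resolves, ws.waitForSyncId(ret.maxSyncId) has already completed,
    // so frontend caches reflect the notes changed above and `result` can be used right away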
@@ -53,7 +53,7 @@ function closePersistent(id) {
    $("#toast-persistent-" + id).toast("dispose");
}

function showMessage(message, delay = 3000) {
function showMessage(message, delay = 2000) {
    console.debug(utils.now(), "message: ", message);

    toast({
@@ -778,18 +778,21 @@ ws.subscribeToMessages(message => {
    }
});

ws.subscribeToOutsideSyncMessages(syncData => {
// this is a synchronous handler - it returns only once the data has been updated
ws.subscribeToOutsideSyncMessages(async syncData => {
    const noteIdsToRefresh = new Set();

    // this has the problem that the former parentNoteId might not be invalidated
    // and the former location of the branch/note won't be removed.
    syncData.filter(sync => sync.entityName === 'branches').forEach(sync => noteIdsToRefresh.add(sync.parentNoteId));

    syncData.filter(sync => sync.entityName === 'notes').forEach(sync => noteIdsToRefresh.add(sync.noteId));
    syncData.filter(sync => sync.entityName === 'notes').forEach(sync => noteIdsToRefresh.add(sync.entityId));

    syncData.filter(sync => sync.entityName === 'note_reordering').forEach(sync => noteIdsToRefresh.add(sync.entityId));

    reloadNotes(Array.from(noteIdsToRefresh));
    if (noteIdsToRefresh.size > 0) {
        await reloadNotes(Array.from(noteIdsToRefresh));
    }
});

utils.bindGlobalShortcut('ctrl+o', async () => {
@@ -827,6 +830,12 @@ async function checkFolderStatus(node) {
    }
}

async function reloadNotes(noteIds) {
    if (noteIds.length === 0) {
        return;
    }

    console.debug("Reloading notes", noteIds);

    await treeCache.reloadNotesAndTheirChildren(noteIds);

    const activeNotePath = noteDetailService.getActiveTabNotePath();
@@ -843,7 +852,7 @@ async function reloadNotes(noteIds) {
        const node = await getNodeFromPath(activeNotePath);

        if (node) {
            node.setActive(true, {noEvents: true}); // this node has been already active so no need to fire events again
            await node.setActive(true, {noEvents: true}); // this node has been already active so no need to fire events again
        }
    }
}
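
To make the refresh logic above concrete, here is a small self-contained sketch with made-up sync rows showing which note ids end up being reloaded, including the gap the comment points out for moved branches:

    // made-up sync rows as they might arrive over the websocket
    const syncData = [
        {entityName: 'branches', entityId: 'b1', parentNoteId: 'newParent'},
        {entityName: 'notes', entityId: 'note1'},
        {entityName: 'note_reordering', entityId: 'reorderedParent'}
    ];

    const noteIdsToRefresh = new Set();

    syncData.filter(sync => sync.entityName === 'branches').forEach(sync => noteIdsToRefresh.add(sync.parentNoteId));
    syncData.filter(sync => sync.entityName === 'notes').forEach(sync => noteIdsToRefresh.add(sync.entityId));
    syncData.filter(sync => sync.entityName === 'note_reordering').forEach(sync => noteIdsToRefresh.add(sync.entityId));

    console.log(Array.from(noteIdsToRefresh)); // ['newParent', 'note1', 'reorderedParent']
    // if branch 'b1' was moved away from some former parent, that parent is not part of
    // the sync row, so its stale child list is never refreshed - the problem the comment mentions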
@@ -9,8 +9,9 @@ const outsideSyncMessageHandlers = [];
const messageHandlers = [];

let ws;
let lastSyncId;
let lastSyncId = window.glob.maxSyncIdAtLoad;
let lastPingTs;
let syncDataQueue = [];

function logError(message) {
    console.log(utils.now(), message); // needs to be separate from .trace()
@@ -36,7 +37,10 @@ function subscribeToAllSyncMessages(messageHandler) {
    allSyncMessageHandlers.push(messageHandler);
}

function handleMessage(event) {
// used to serialize sync operations
let consumeQueuePromise = null;

async function handleMessage(event) {
    const message = JSON.parse(event.data);

    for (const messageHandler of messageHandlers) {
@@ -46,23 +50,26 @@ function handleMessage(event) {
    if (message.type === 'sync') {
        lastPingTs = Date.now();

        $outstandingSyncsCount.html(message.outstandingSyncs);

        if (message.data.length > 0) {
            console.debug(utils.now(), "Sync data: ", message.data);

            lastSyncId = message.data[message.data.length - 1].id;
            syncDataQueue.push(...message.data);

            // first wait for all the preceding consumers to finish
            while (consumeQueuePromise) {
                await consumeQueuePromise;
            }

            // it's my turn so start it up
            consumeQueuePromise = consumeSyncData();

            await consumeQueuePromise;

            // finish and set to null to signal somebody else can pick it up
            consumeQueuePromise = null;
        }

        for (const syncMessageHandler of allSyncMessageHandlers) {
            syncMessageHandler(message.data);
        }

        const syncData = message.data.filter(sync => sync.sourceId !== glob.sourceId);

        for (const syncMessageHandler of outsideSyncMessageHandlers) {
            syncMessageHandler(syncData);
        }

        $outstandingSyncsCount.html(message.outstandingSyncs);
    }
    else if (message.type === 'sync-hash-check-failed') {
        toastService.showError("Sync check failed!", 60000);
@@ -72,6 +79,47 @@ function handleMessage(event) {
    }
}

let syncIdReachedListeners = [];

function waitForSyncId(desiredSyncId) {
    console.log("Waiting for ", desiredSyncId);

    if (desiredSyncId <= lastSyncId) {
        return Promise.resolve();
    }

    return new Promise((res, rej) => {
        syncIdReachedListeners.push({
            desiredSyncId,
            resolvePromise: res
        })
    });
}

async function consumeSyncData() {
    if (syncDataQueue.length >= 0) {
        const allSyncData = syncDataQueue;
        syncDataQueue = [];

        const outsideSyncData = allSyncData.filter(sync => sync.sourceId !== glob.sourceId);

        // the update process should be synchronous as a whole but individual handlers can run in parallel
        await Promise.all([
            ...allSyncMessageHandlers.map(syncHandler => syncHandler(allSyncData)),
            ...outsideSyncMessageHandlers.map(syncHandler => syncHandler(outsideSyncData))
        ]);

        lastSyncId = allSyncData[allSyncData.length - 1].id;
    }

    syncIdReachedListeners
        .filter(l => l.desiredSyncId <= lastSyncId)
        .forEach(l => l.resolvePromise());

    syncIdReachedListeners = syncIdReachedListeners
        .filter(l => l.desiredSyncId > lastSyncId);
}

function connectWebSocket() {
    const protocol = document.location.protocol === 'https:' ? 'wss' : 'ws';

@@ -113,5 +161,6 @@ export default {
    logError,
    subscribeToMessages,
    subscribeToAllSyncMessages,
    subscribeToOutsideSyncMessages
    subscribeToOutsideSyncMessages,
    waitForSyncId
};
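
The consumeQueuePromise handling above is a promise-chaining way to serialize async work: each incoming sync batch is appended to a queue, and the handler either waits for the consumer already in flight or becomes the consumer itself. A stripped-down, self-contained sketch of the same pattern with simplified names:

    let queue = [];
    let consumePromise = null;

    async function onBatch(items) {
        queue.push(...items);

        // wait for any consumer already running, so batches are applied strictly in order
        while (consumePromise) {
            await consumePromise;
        }

        // become the consumer for whatever is queued right now
        consumePromise = consume();
        await consumePromise;

        // signal that the next caller may take over
        consumePromise = null;
    }

    async function consume() {
        const batch = queue;
        queue = [];

        // ... apply the batch (in ws.js this is where the sync message handlers run) ...
        console.log('applied', batch.length, 'items');
    }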
@@ -3,13 +3,18 @@
const scriptService = require('../../services/script');
const attributeService = require('../../services/attributes');
const repository = require('../../services/repository');
const syncService = require('../../services/sync');

async function exec(req) {
    try {
        const result = await scriptService.executeScript(req.body.script, req.body.params, req.body.startNoteId,
            req.body.currentNoteId, req.body.originEntityName, req.body.originEntityId);

        return { success: true, executionResult: result };
        return {
            success: true,
            executionResult: result,
            maxSyncId: await syncService.getMaxSyncId()
        };
    }
    catch (e) {
        return { success: false, error: e.message };
@@ -327,6 +327,10 @@ async function updatePushStats() {
    }
}

async function getMaxSyncId() {
    return await sql.getValue('SELECT MAX(id) FROM sync');
}

sqlInit.dbReady.then(async () => {
    setInterval(cls.wrap(sync), 60000);

@@ -340,5 +344,6 @@ module.exports = {
    sync,
    login,
    getSyncRecords,
    stats
    stats,
    getMaxSyncId
};