Mirror of https://github.com/zadam/trilium.git (synced 2025-06-06 18:08:33 +02:00)
Merge branch 'beta'
# Conflicts:
#	docs/backend_api/BAttachment.html
#	docs/backend_api/BNote.html
#	docs/backend_api/becca_entities_bnote.js.html
#	docs/frontend_api/FAttribute.html
#	docs/frontend_api/FBranch.html
#	docs/frontend_api/FNote.html
#	docs/frontend_api/FrontendScriptApi.html
#	docs/frontend_api/entities_fattachment.js.html
#	docs/frontend_api/entities_fattribute.js.html
#	docs/frontend_api/entities_fblob.js.html
#	docs/frontend_api/entities_fbranch.js.html
#	docs/frontend_api/entities_fnote.js.html
#	docs/frontend_api/global.html
#	docs/frontend_api/index.html
#	docs/frontend_api/services_frontend_script_api.js.html
#	package-lock.json
commit af24758ad4
@@ -2,7 +2,7 @@ image:
   file: .gitpod.dockerfile
 
 tasks:
-  - before: nvm install 18.16.1 && nvm use 18.16.1
+  - before: nvm install 18.18.0 && nvm use 18.18.0
     init: npm install
     command: npm run start-server
 
@@ -1,5 +1,5 @@
 # !!! Don't try to build this Dockerfile directly, run it through bin/build-docker.sh script !!!
-FROM node:18.16.1-alpine
+FROM node:18.18.0-alpine
 
 # Create app directory
 WORKDIR /usr/src/app
@@ -5,18 +5,3 @@ echo "Packaging debian x64 distribution..."
 VERSION=`jq -r ".version" package.json`
 
 ./node_modules/.bin/electron-installer-debian --config bin/deb-options.json --options.version=${VERSION} --arch amd64
-
-
-# hacky stop-gag measure to produce debian compatible XZ compressed debs until this is fixed: https://github.com/electron-userland/electron-installer-debian/issues/272
-cd dist
-ar x trilium_${VERSION}_amd64.deb
-rm trilium_${VERSION}_amd64.deb
-# recompress
-< control.tar.zst zstd -d | xz > control.tar.xz
-< data.tar.zst zstd -d | xz > data.tar.xz
-# create deb archive (I really do not know, what argument "sdsd" is for but something is required for ar to create the archive as desired)
-ar -m -c -a sdsd trilium_${VERSION}_amd64.deb debian-binary control.tar.xz data.tar.xz
-
-rm control* data* debian-binary
-
-echo "Converted to XZ deb"
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 
 PKG_DIR=dist/trilium-linux-x64-server
-NODE_VERSION=18.16.1
+NODE_VERSION=18.18.0
 
 if [ "$1" != "DONTCOPY" ]
 then
@@ -5,7 +5,7 @@ if [[ $# -eq 0 ]] ; then
   exit 1
 fi
 
-n exec 18.16.1 npm run webpack
+n exec 18.18.0 npm run webpack
 
 DIR=$1
 
@@ -27,7 +27,7 @@ cp -r electron.js $DIR/
 cp webpack-* $DIR/
 
 # run in subshell (so we return to original dir)
-(cd $DIR && n exec 18.16.1 npm install --only=prod)
+(cd $DIR && n exec 18.18.0 npm install --only=prod)
 
 # cleanup of useless files in dependencies
 rm -r $DIR/node_modules/image-q/demo
@@ -1,6 +1,7 @@
 {
   "src": "dist/trilium-linux-x64",
   "dest": "dist/",
+  "compression": "xz",
   "name": "trilium",
   "productName": "Trilium Notes",
   "genericName": "Note taker",
@@ -11,4 +12,4 @@
   "bin": "trilium",
   "icon": "dist/trilium-linux-x64/icon.png",
   "categories": [ "Office" ]
 }
@@ -54,7 +54,7 @@ const LOG_ALL_QUERIES = false;
 });
 
 function insert(tableName, rec, replace = false) {
-    const keys = Object.keys(rec);
+    const keys = Object.keys(rec || {});
     if (keys.length === 0) {
         log.error(`Can't insert empty object into table ${tableName}`);
         return;
@@ -81,7 +81,7 @@ function replace(tableName, rec) {
 }
 
 function upsert(tableName, primaryKey, rec) {
-    const keys = Object.keys(rec);
+    const keys = Object.keys(rec || {});
     if (keys.length === 0) {
         log.error(`Can't upsert empty object into table ${tableName}`);
         return;
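The only functional change in this hunk is the `rec || {}` guard in insert() and upsert(). A rough illustration of its effect follows; the null record below stands in for a hypothetical buggy caller and is not taken from the source:

// Previously Object.keys(null) threw "TypeError: Cannot convert undefined or null to object".
// With the guard, a missing record falls through to the existing "empty object" error path.
const rec = null;                     // hypothetical: caller passed no record at all
const keys = Object.keys(rec || {});  // [] instead of a thrown TypeError
console.log(keys.length === 0);       // true, so the function logs the error and returns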
@@ -13,6 +13,9 @@ appIconService.installLocalAppIcon();
 
 require('electron-dl')({ saveAs: true });
 
+// needed for excalidraw export https://github.com/zadam/trilium/issues/4271
+app.commandLine.appendSwitch("enable-experimental-web-platform-features");
+
 // Quit when all windows are closed, except on macOS. There, it's common
 // for applications and their menu bar to stay active until the user quits
 // explicitly with Cmd + Q.
293  libraries/mermaid.min.js (vendored)
     File diff suppressed because one or more lines are too long

 14  package.json
@@ -2,7 +2,7 @@
   "name": "trilium",
   "productName": "Trilium Notes",
   "description": "Trilium Notes",
-  "version": "0.61.6-beta",
+  "version": "0.61.7-beta",
   "license": "AGPL-3.0-only",
   "main": "electron.js",
   "bin": {
@@ -33,7 +33,7 @@
   "dependencies": {
     "@braintree/sanitize-url": "6.0.4",
     "@electron/remote": "2.0.11",
-    "@excalidraw/excalidraw": "0.15.3",
+    "@excalidraw/excalidraw": "0.16.1",
     "archiver": "5.3.1",
     "async-mutex": "0.4.0",
     "axios": "1.5.0",
@@ -43,7 +43,7 @@
     "compression": "1.7.4",
     "cookie-parser": "1.4.6",
     "csurf": "1.11.0",
-    "dayjs": "1.11.9",
+    "dayjs": "1.11.10",
     "dayjs-plugin-utc": "0.1.2",
     "debounce": "1.2.1",
     "ejs": "3.1.9",
@@ -68,7 +68,7 @@
     "jimp": "0.22.10",
     "joplin-turndown-plugin-gfm": "1.0.12",
     "jsdom": "22.1.0",
-    "marked": "9.0.0",
+    "marked": "9.0.3",
     "mime-types": "2.1.35",
     "multer": "1.4.5-lts.1",
     "node-abi": "3.47.0",
@@ -91,17 +91,17 @@
     "tmp": "0.2.1",
     "turndown": "7.1.2",
     "unescape": "1.0.1",
-    "ws": "8.14.1",
+    "ws": "8.14.2",
     "xml2js": "0.6.2",
     "yauzl": "2.10.0"
   },
   "devDependencies": {
     "cross-env": "7.0.3",
-    "electron": "25.8.1",
+    "electron": "25.8.2",
     "electron-builder": "24.6.4",
     "electron-packager": "17.1.2",
     "electron-rebuild": "3.2.9",
-    "eslint": "8.48.0",
+    "eslint": "8.49.0",
     "eslint-config-airbnb-base": "15.0.0",
     "eslint-config-prettier": "9.0.0",
     "eslint-plugin-import": "2.28.1",
@@ -14,6 +14,7 @@ import NoteContextAwareWidget from "../widgets/note_context_aware_widget.js";
 import BasicWidget from "../widgets/basic_widget.js";
 import SpacedUpdate from "./spaced_update.js";
 import shortcutService from "./shortcuts.js";
+import dialogService from "./dialog.js";
 
 
 /**
@@ -288,7 +289,7 @@ function FrontendScriptApi(startNote, currentNote, originEntity = null, $contain
     this.parseDate = utils.parseDate;
 
     /**
-     * Show an info message to the user.
+     * Show an info toast message to the user.
      *
     * @method
     * @param {string} message
@@ -296,13 +297,43 @@ function FrontendScriptApi(startNote, currentNote, originEntity = null, $contain
     this.showMessage = toastService.showMessage;
 
     /**
-     * Show an error message to the user.
+     * Show an error toast message to the user.
      *
     * @method
     * @param {string} message
     */
     this.showError = toastService.showError;
 
+    /**
+     * Show an info dialog to the user.
+     *
+     * @method
+     * @param {string} message
+     * @returns {Promise}
+     */
+    this.showInfoDialog = dialogService.info;
+
+    /**
+     * Show confirm dialog to the user.
+     *
+     * @method
+     * @param {string} message
+     * @returns {Promise<boolean>} promise resolving to true if the user confirmed
+     */
+    this.showConfirmDialog = dialogService.confirm;
+
+    /**
+     * Show prompt dialog to the user.
+     *
+     * @method
+     * @param {object} props
+     * @param {string} props.title
+     * @param {string} props.message
+     * @param {string} props.defaultValue
+     * @returns {Promise<string>} promise resolving to the answer provided by the user
+     */
+    this.showPromptDialog = dialogService.prompt;
+
     /**
     * Trigger command. This is a very low-level API which should be avoided if possible.
     *
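The three new dialog helpers simply re-export dialogService functions. Below is a hedged usage sketch of how a frontend script could call them; the `api` object, the surrounding async context, and all message texts are assumptions, while the method names and return types follow the JSDoc in the hunk above:

const confirmed = await api.showConfirmDialog("Apply this change to all child notes?");

if (confirmed) {
    // showPromptDialog resolves to the answer typed by the user
    const name = await api.showPromptDialog({
        title: "Rename label",
        message: "New label name:",
        defaultValue: "todo"
    });

    await api.showInfoDialog(`Labels will be renamed to '${name}'.`);
} else {
    api.showMessage("No changes were made."); // existing toast helper
}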
@@ -20,10 +20,13 @@ const TPL = `
         display: block;
     }
 
 
     .excalidraw-wrapper {
         height: 100%;
     }
+    .excalidraw button[data-testid="json-export-button"] {
+        display: none !important;
+    }
 
     :root[dir="ltr"]
     .excalidraw
@@ -60,8 +63,8 @@ const TPL = `
  * for sketching. Excalidraw has a vibrant and active community.
  *
  * Functionality:
- * We store the excalidraw assets (elements, appState, files) in the note. In addition to that, we
- * export the SVG from the canvas on every update. The SVG is also saved in the note. It is used when
+ * We store the excalidraw assets (elements and files) in the note. In addition to that, we
+ * export the SVG from the canvas on every update and store it in the note's attachment. It is used when
  * calling api/images and makes referencing very easy.
  *
  * Paths not taken.
@@ -209,19 +212,15 @@ export default class ExcalidrawTypeWidget extends TypeWidget {
 
             content = {
                 elements: [],
-                appState: {},
                 files: [],
+                appState: {}
             };
         }
 
-        const {elements, appState, files} = content;
+        const {elements, files, appState} = content;
 
         appState.theme = this.themeStyle;
 
-        /**
-         * use widths and offsets of current view, since stored appState has the state from
-         * previous edit. using the stored state would lead to pointer mismatch.
-         */
         const boundingClientRect = this.excalidrawWrapperRef.current.getBoundingClientRect();
         appState.width = boundingClientRect.width;
         appState.height = boundingClientRect.height;
@@ -284,10 +283,7 @@ export default class ExcalidrawTypeWidget extends TypeWidget {
          */
         const files = this.excalidrawRef.current.getFiles();
 
-        /**
-         * parallel svg export to combat bitrot and enable rendering image for note inclusion,
-         * preview, and share.
-         */
+        // parallel svg export to combat bitrot and enable rendering image for note inclusion, preview, and share
         const svg = await window.ExcalidrawLib.exportToSvg({
             elements,
             appState,
@@ -302,14 +298,18 @@ export default class ExcalidrawTypeWidget extends TypeWidget {
             if (element.fileId) {
                 activeFiles[element.fileId] = files[element.fileId];
             }
-        })
+        });
 
         const content = {
             type: "excalidraw",
             version: 2,
             elements,
-            appState,
-            files: activeFiles
+            files: activeFiles,
+            appState: {
+                scrollX: appState.scrollX,
+                scrollY: appState.scrollY,
+                zoom: appState.zoom
+            }
         };
 
         const attachments = [
@@ -339,7 +339,7 @@ export default class ExcalidrawTypeWidget extends TypeWidget {
 
         return {
             content: JSON.stringify(content),
-            attachments: attachments
+            attachments
         };
     }
 
@@ -458,6 +458,10 @@ export default class ExcalidrawTypeWidget extends TypeWidget {
                     handleKeyboardGlobally: false,
                     autoFocus: false,
                     onLinkOpen,
+                    UIOptions: {
+                        saveToActiveFile: false,
+                        saveAsImage: false
+                    }
                 })
             )
         );
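Taken together, these hunks change what getData() persists: appState is reduced to the view state (scroll and zoom) and only files still referenced by some element are kept. A sketch of the stored content object, with illustrative values; the field names come from the hunks above, the concrete values and the zoom shape are assumptions:

const storedContent = {
    type: "excalidraw",
    version: 2,
    elements: [],   // excalidraw elements as returned by the editor
    files: {},      // activeFiles: only files still referenced by some element
    appState: {     // reduced from the full appState to just the view state
        scrollX: 0,
        scrollY: 0,
        zoom: { value: 1 }  // illustrative; stored in whatever shape excalidraw keeps appState.zoom
    }
};

console.log(JSON.stringify(storedContent));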
@@ -166,11 +166,7 @@ function update(req) {
 
     const {entities, instanceId} = body;
 
-    sql.transactional(() => {
-        for (const {entityChange, entity} of entities) {
-            syncUpdateService.updateEntity(entityChange, entity, instanceId);
-        }
-    });
+    sql.transactional(() => syncUpdateService.updateEntities(entities, instanceId));
 }
 
 setInterval(() => {
@@ -1 +1 @@
-module.exports = { buildDate:"2023-09-06T23:57:29+02:00", buildRevision: "6fa9d996e84f87fcb73c3388a5170affd2c2f7cc" };
+module.exports = { buildDate:"2023-09-21T23:38:18+02:00", buildRevision: "79e5e3b65ff613cdb81e2afaa832037ccf06d7b8" };
@@ -12,6 +12,7 @@ const BBranch = require('../becca/entities/bbranch');
 const revisionService = require('./revisions');
 const becca = require("../becca/becca");
 const utils = require("../services/utils");
+const eraseService = require("../services/erase");
 const {sanitizeAttributeName} = require("./sanitize_attribute_name");
 const noteTypes = require("../services/note_types").getNoteTypeNames();
 
@@ -440,7 +441,7 @@ class ConsistencyChecks {
         this.findAndFixIssues(`
                     SELECT notes.noteId, notes.type, notes.mime
                     FROM notes
                       JOIN blobs USING (blobId)
                     WHERE isDeleted = 0
                       AND isProtected = 0
                       AND content IS NULL`,
@@ -460,19 +461,36 @@ class ConsistencyChecks {
             }
 
         this.findAndFixIssues(`
-                    SELECT revisions.revisionId
+                    SELECT revisions.revisionId, blobs.blobId
                     FROM revisions
                       LEFT JOIN blobs USING (blobId)
                     WHERE blobs.blobId IS NULL`,
-            ({revisionId}) => {
+            ({revisionId, blobId}) => {
                 if (this.autoFix) {
                     revisionService.eraseRevisions([revisionId]);
 
                     this.reloadNeeded = true;
 
-                    logFix(`Note revision content '${revisionId}' was set to erased since its content did not exist.`);
+                    logFix(`Note revision '${revisionId}' was erased since the referenced blob '${blobId}' did not exist.`);
                 } else {
-                    logError(`Note revision content '${revisionId}' does not exist`);
+                    logError(`Note revision '${revisionId}' blob '${blobId}' does not exist`);
+                }
+            });
+
+        this.findAndFixIssues(`
+                    SELECT attachments.attachmentId, blobs.blobId
+                    FROM attachments
+                      LEFT JOIN blobs USING (blobId)
+                    WHERE blobs.blobId IS NULL`,
+            ({attachmentId, blobId}) => {
+                if (this.autoFix) {
+                    eraseService.eraseAttachments([attachmentId]);
+
+                    this.reloadNeeded = true;
+
+                    logFix(`Attachment '${attachmentId}' was erased since the referenced blob '${blobId}' did not exist.`);
+                } else {
+                    logError(`Attachment '${attachmentId}' blob '${blobId}' does not exist`);
                 }
             });
 
@@ -183,5 +183,6 @@ module.exports = {
     eraseDeletedNotesNow,
     eraseUnusedAttachmentsNow,
     eraseNotesWithDeleteId,
-    eraseUnusedBlobs
+    eraseUnusedBlobs,
+    eraseAttachments
 };
@@ -895,6 +895,10 @@ async function asyncPostProcessContent(note, content) {
 
 // all keys should be replaced by the corresponding values
 function replaceByMap(str, mapObj) {
+    if (!mapObj) {
+        return str;
+    }
+
     const re = new RegExp(Object.keys(mapObj).join("|"),"g");
 
     return str.replace(re, matched => mapObj[matched]);
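For context, replaceByMap() builds a single regular expression from the map keys, and the new guard simply returns the input unchanged when no map is given. A small self-contained sketch with made-up sample values (the note IDs below are illustrative, not from the source):

function replaceByMap(str, mapObj) {
    if (!mapObj) {
        return str;
    }

    // one alternation pattern covering every key of the map
    const re = new RegExp(Object.keys(mapObj).join("|"), "g");

    return str.replace(re, matched => mapObj[matched]);
}

console.log(replaceByMap("noteIdA and noteIdB", { noteIdA: "abc", noteIdB: "def" })); // "abc and def"
console.log(replaceByMap("unchanged", null)); // "unchanged"; the guard avoids reading keys of null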
@@ -51,7 +51,7 @@ function eraseRevisions(revisionIdsToErase) {
         return;
     }
 
-    log.info(`Removing note revisions: ${JSON.stringify(revisionIdsToErase)}`);
+    log.info(`Removing revisions: ${JSON.stringify(revisionIdsToErase)}`);
 
     sql.executeMany(`DELETE FROM revisions WHERE revisionId IN (???)`, revisionIdsToErase);
     sql.executeMany(`UPDATE entity_changes SET isErased = 1, utcDateChanged = '${dateUtils.utcNowDateTime()}' WHERE entityName = 'revisions' AND entityId IN (???)`, revisionIdsToErase);
@@ -93,7 +93,7 @@ async function setupSyncFromSyncServer(syncServerHost, syncProxy, password) {
         return { result: 'success' };
     }
     catch (e) {
-        log.error(`Sync failed: ${e.message}`);
+        log.error(`Sync failed: '${e.message}', stack: ${e.stack}`);
 
         return {
             result: 'failure',
@@ -26,7 +26,7 @@ const LOG_ALL_QUERIES = false;
 });
 
 function insert(tableName, rec, replace = false) {
-    const keys = Object.keys(rec);
+    const keys = Object.keys(rec || {});
     if (keys.length === 0) {
         log.error(`Can't insert empty object into table ${tableName}`);
         return;
@@ -53,7 +53,7 @@ function replace(tableName, rec) {
 }
 
 function upsert(tableName, primaryKey, rec) {
-    const keys = Object.keys(rec);
+    const keys = Object.keys(rec || {});
     if (keys.length === 0) {
         log.error(`Can't upsert empty object into table ${tableName}`);
         return;
@@ -71,8 +71,7 @@ async function sync() {
         };
     }
     else {
-        log.info(`sync failed: ${e.message}
-stack: ${e.stack}`);
+        log.info(`Sync failed: '${e.message}', stack: ${e.stack}`);
 
         ws.syncFailed();
 
@@ -127,8 +126,6 @@ async function doLogin() {
 }
 
 async function pullChanges(syncContext) {
-    let atLeastOnePullApplied = false;
-
     while (true) {
         const lastSyncedPull = getLastSyncedPull();
         const logMarkerId = utils.randomString(10); // to easily pair sync events between client and server logs
@@ -144,22 +141,7 @@ async function pullChanges(syncContext) {
         const pulledDate = Date.now();
 
         sql.transactional(() => {
-            for (const {entityChange, entity} of entityChanges) {
-                const changeAppliedAlready = entityChange.changeId
-                    && !!sql.getValue("SELECT 1 FROM entity_changes WHERE changeId = ?", [entityChange.changeId]);
-
-                if (changeAppliedAlready) {
-                    continue;
-                }
-
-                if (!atLeastOnePullApplied) { // send only for first
-                    ws.syncPullInProgress();
-
-                    atLeastOnePullApplied = true;
-                }
-
-                syncUpdateService.updateEntity(entityChange, entity, syncContext.instanceId);
-            }
+            syncUpdateService.updateEntities(entityChanges, syncContext.instanceId);
 
             if (lastSyncedPull !== lastEntityChangeId) {
                 setLastSyncedPull(lastEntityChangeId);
@@ -3,15 +3,51 @@ const log = require('./log');
 const entityChangesService = require('./entity_changes');
 const eventService = require('./events');
 const entityConstructor = require("../becca/entity_constructor");
+const ws = require("./ws");
 
-function updateEntity(remoteEC, remoteEntityRow, instanceId) {
+function updateEntities(entityChanges, instanceId) {
+    if (entityChanges.length === 0) {
+        return;
+    }
+
+    let atLeastOnePullApplied = false;
+    const updateContext = {
+        updated: {},
+        alreadyUpdated: 0,
+        erased: 0,
+        alreadyErased: 0
+    };
+
+    for (const {entityChange, entity} of entityChanges) {
+        const changeAppliedAlready = entityChange.changeId
+            && !!sql.getValue("SELECT 1 FROM entity_changes WHERE changeId = ?", [entityChange.changeId]);
+
+        if (changeAppliedAlready) {
+            updateContext.alreadyUpdated++;
+
+            continue;
+        }
+
+        if (!atLeastOnePullApplied) { // avoid spamming and send only for first
+            ws.syncPullInProgress();
+
+            atLeastOnePullApplied = true;
+        }
+
+        updateEntity(entityChange, entity, instanceId, updateContext);
+    }
+
+    logUpdateContext(updateContext);
+}
+
+function updateEntity(remoteEC, remoteEntityRow, instanceId, updateContext) {
     if (!remoteEntityRow && remoteEC.entityName === 'options') {
         return; // can be undefined for options with isSynced=false
     }
 
     const updated = remoteEC.entityName === 'note_reordering'
         ? updateNoteReordering(remoteEC, remoteEntityRow, instanceId)
-        : updateNormalEntity(remoteEC, remoteEntityRow, instanceId);
+        : updateNormalEntity(remoteEC, remoteEntityRow, instanceId, updateContext);
 
     if (updated) {
         if (remoteEntityRow?.isDeleted) {
@@ -29,11 +65,12 @@ function updateEntity(remoteEC, remoteEntityRow, instanceId) {
         }
     }
 }
 
-function updateNormalEntity(remoteEC, remoteEntityRow, instanceId) {
+function updateNormalEntity(remoteEC, remoteEntityRow, instanceId, updateContext) {
     const localEC = sql.getRow(`SELECT * FROM entity_changes WHERE entityName = ? AND entityId = ?`, [remoteEC.entityName, remoteEC.entityId]);
 
     if (!localEC?.isErased && remoteEC.isErased) {
         eraseEntity(remoteEC, instanceId);
+        updateContext.erased++;
+
         return true;
     } else if (localEC?.isErased && !remoteEC.isErased) {
@@ -42,10 +79,15 @@ function updateNormalEntity(remoteEC, remoteEntityRow, instanceId) {
 
         return false;
     } else if (localEC?.isErased && remoteEC.isErased) {
+        updateContext.alreadyErased++;
+
         return false;
     }
 
     if (!localEC || localEC.utcDateChanged <= remoteEC.utcDateChanged) {
+        if (!remoteEntityRow) {
+            throw new Error(`Empty entity row for: ${JSON.stringify(remoteEC)}`);
+        }
+
         if (remoteEC.entityName === 'blobs' && remoteEntityRow.content !== null) {
             // we always use a Buffer object which is different from normal saving - there we use a simple string type for
             // "string notes". The problem is that in general, it's not possible to detect whether a blob content
@@ -61,6 +103,9 @@ function updateNormalEntity(remoteEC, remoteEntityRow, instanceId) {
 
         sql.replace(remoteEC.entityName, remoteEntityRow);
 
+        updateContext.updated[remoteEC.entityName] = updateContext.updated[remoteEC.entityName] || [];
+        updateContext.updated[remoteEC.entityName].push(remoteEC.entityId);
+
         if (!localEC || localEC.utcDateChanged < remoteEC.utcDateChanged) {
             entityChangesService.putEntityChangeWithInstanceId(remoteEC, instanceId);
         }
@@ -77,6 +122,10 @@ function updateNormalEntity(remoteEC, remoteEntityRow, instanceId) {
 }
 
 function updateNoteReordering(remoteEC, remoteEntityRow, instanceId) {
+    if (!remoteEntityRow) {
+        throw new Error(`Empty note_reordering body for: ${JSON.stringify(remoteEC)}`);
+    }
+
     for (const key in remoteEntityRow) {
         sql.execute("UPDATE branches SET notePosition = ? WHERE branchId = ?", [remoteEntityRow[key], key]);
     }
@@ -110,6 +159,15 @@ function eraseEntity(entityChange, instanceId) {
     entityChangesService.putEntityChangeWithInstanceId(entityChange, instanceId);
 }
 
+function logUpdateContext(updateContext) {
+    const message = JSON.stringify(updateContext)
+        .replaceAll('"', '')
+        .replaceAll(":", ": ")
+        .replaceAll(",", ", ");
+
+    log.info(message.substr(1, message.length - 2));
+}
+
 module.exports = {
-    updateEntity
+    updateEntities
 };
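The new logUpdateContext() collapses the collected counters into a single log line by stripping the JSON punctuation. A small sketch of the resulting output, using made-up sample values but the same string transforms as the hunk above:

const updateContext = {
    updated: { notes: ["abc123"], branches: ["def456"] },
    alreadyUpdated: 2,
    erased: 1,
    alreadyErased: 0
};

const message = JSON.stringify(updateContext)
    .replaceAll('"', '')
    .replaceAll(":", ": ")
    .replaceAll(",", ", ");

console.log(message.substr(1, message.length - 2));
// updated: {notes: [abc123], branches: [def456]}, alreadyUpdated: 2, erased: 1, alreadyErased: 0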
@@ -25,6 +25,10 @@ function md5(content) {
 }
 
 function hashedBlobId(content) {
+    if (content === null || content === undefined) {
+        content = "";
+    }
+
     // sha512 is faster than sha256
     const base64Hash = crypto.createHash('sha512').update(content).digest('base64');
 