mirror of https://github.com/zadam/trilium.git
Merge remote-tracking branch 'origin/better-sqlite3'

# Conflicts:
#	libraries/ckeditor/ckeditor.js
#	libraries/ckeditor/ckeditor.js.map
#	package-lock.json
#	package.json
#	src/public/app/services/utils.js
#	src/public/app/widgets/type_widgets/editable_text.js
#	src/services/utils.js

commit a7d9870846
@@ -33,6 +33,9 @@ find $DIR/libraries -name "*.map" -type f -delete
 
 rm -r $DIR/src/public/app
 
+rm -r $DIR/node_modules/sqlite3/build
+rm -r $DIR/node_modules/sqlite3/deps
+
 sed -i -e 's/app\/desktop.js/app-dist\/desktop.js/g' $DIR/src/views/desktop.ejs
 sed -i -e 's/app\/mobile.js/app-dist\/mobile.js/g' $DIR/src/views/mobile.ejs
 sed -i -e 's/app\/setup.js/app-dist\/setup.js/g' $DIR/src/views/setup.ejs
db/demo.zip (BIN)
Binary file not shown.
@@ -24,8 +24,6 @@ app.on('window-all-closed', () => {
 app.on('ready', async () => {
     app.setAppUserModelId('com.github.zadam.trilium');
 
-    await sqlInit.dbConnection;
-
     // if db is not initialized -> setup process
     // if db is initialized, then we need to wait until the migration process is finished
     if (await sqlInit.isDbInitialized()) {
libraries/ckeditor/ckeditor.js (vendored, 2 lines changed)
File diff suppressed because one or more lines are too long
package-lock.json (generated, 1724 lines changed)
File diff suppressed because it is too large
@@ -2,7 +2,7 @@
   "name": "trilium",
   "productName": "Trilium Notes",
   "description": "Trilium Notes",
-  "version": "0.42.3",
+  "version": "0.43.0-beta",
   "license": "AGPL-3.0-only",
   "main": "electron.js",
   "bin": {
@@ -25,6 +25,7 @@
   "dependencies": {
     "async-mutex": "0.2.2",
     "axios": "0.19.2",
+    "better-sqlite3": "^7.1.0",
     "body-parser": "1.19.0",
     "cls-hooked": "4.2.2",
     "commonmark": "0.29.1",
@@ -81,7 +82,7 @@
   },
   "devDependencies": {
     "cross-env": "^7.0.2",
-    "electron": "9.0.2",
+    "electron": "9.0.4",
     "electron-builder": "22.7.0",
     "electron-packager": "14.2.1",
     "electron-rebuild": "1.11.0",
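The newly added better-sqlite3 dependency is a fully synchronous SQLite driver; the sql.js changes later in this diff build on it, while the older sqlite/sqlite3 packages are still required elsewhere. Below is a minimal sketch of that driver's API — this is not code from the commit, and the file name, table and values are invented for illustration:

const Database = require('better-sqlite3');

// open (or create) a database file; every call below is synchronous
const db = new Database('./example.db');

db.exec("CREATE TABLE IF NOT EXISTS notes (noteId TEXT PRIMARY KEY, title TEXT)");

// statements are prepared once and can then be run repeatedly
const insertNote = db.prepare("INSERT OR REPLACE INTO notes (noteId, title) VALUES (@noteId, @title)");
insertNote.run({noteId: 'root', title: 'root note'});

const row = db.prepare("SELECT title FROM notes WHERE noteId = ?").get('root');
console.log(row.title); // prints "root note"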
@@ -1,12 +1,24 @@
 const backupService = require('./services/backup');
+const sqlInit = require('./services/sql_init');
+require('./entities/entity_constructor');
 
-backupService.anonymize().then(resp => {
-    if (resp.success) {
-        console.log("Anonymized file has been saved to: " + resp.anonymizedFilePath);
+sqlInit.dbReady.then(async () => {
+    try {
+        console.log("Starting anonymization...");
 
-        process.exit(0);
-    }
-    else {
-        console.log("Anonymization failed.");
-    }
+        const resp = await backupService.anonymize();
+
+        if (resp.success) {
+            console.log("Anonymized file has been saved to: " + resp.anonymizedFilePath);
+
+            process.exit(0);
+        } else {
+            console.log("Anonymization failed.");
+        }
+    }
+    catch (e) {
+        console.error(e.message, e.stack);
+    }
+
+    process.exit(1);
 });
src/app.js (11 lines changed)
@@ -31,17 +31,6 @@ app.use((req, res, next) => {
     next();
 });
 
-app.use((req, res, next) => {
-    cls.namespace.bindEmitter(req);
-    cls.namespace.bindEmitter(res);
-
-    cls.init(() => {
-        cls.namespace.set("Hi");
-
-        next();
-    });
-});
-
 app.use(bodyParser.json({limit: '500mb'}));
 app.use(bodyParser.urlencoded({extended: false}));
 app.use(cookieParser());
@@ -105,7 +105,7 @@ class Attribute extends Entity {
 
     // cannot be static!
     updatePojo(pojo) {
-        delete pojo.__note;
+        delete pojo.__note; // FIXME: probably note necessary anymore
     }
 
     createClone(type, name, value, isInheritable) {
@@ -152,10 +152,10 @@ function AttributesModel() {
             attr.value = treeService.getNoteIdFromNotePath(attr.selectedPath);
         }
         else if (attr.type === 'label-definition') {
-            attr.value = attr.labelDefinition;
+            attr.value = JSON.stringify(attr.labelDefinition);
        }
         else if (attr.type === 'relation-definition') {
-            attr.value = attr.relationDefinition;
+            attr.value = JSON.stringify(attr.relationDefinition);
         }
 
         delete attr.labelValue;
@@ -16,18 +16,18 @@ export async function showDialog(ancestorNoteId) {
         ancestorNoteId = hoistedNoteService.getHoistedNoteId();
     }
 
-    const result = await server.get('recent-changes/' + ancestorNoteId);
+    const recentChangesRows = await server.get('recent-changes/' + ancestorNoteId);
 
     // preload all notes into cache
-    await treeCache.getNotes(result.map(r => r.noteId), true);
+    await treeCache.getNotes(recentChangesRows.map(r => r.noteId), true);
 
     $content.empty();
 
-    if (result.length === 0) {
+    if (recentChangesRows.length === 0) {
         $content.append("No changes yet ...");
     }
 
-    const groupedByDate = groupByDate(result);
+    const groupedByDate = groupByDate(recentChangesRows);
 
     for (const [dateDay, dayChanges] of groupedByDate) {
         const $changesList = $('<ul>');
@@ -95,10 +95,10 @@ export async function showDialog(ancestorNoteId) {
     }
 }
 
-function groupByDate(result) {
+function groupByDate(rows) {
     const groupedByDate = new Map();
 
-    for (const row of result) {
+    for (const row of rows) {
         const dateDay = row.date.substr(0, 10);
 
         if (!groupedByDate.has(dateDay)) {
@@ -23,8 +23,12 @@ class Attribute {
     }
 
     /** @returns {NoteShort} */
-    async getNote() {
-        return await this.treeCache.getNote(this.noteId);
+    getNote() {
+        return this.treeCache.notes[this.noteId];
+    }
+
+    get targetNoteId() { // alias
+        return this.type === 'relation' ? this.value : undefined;
     }
 
     get jsonValue() {
@@ -43,6 +47,34 @@ class Attribute {
     get toString() {
         return `Attribute(attributeId=${this.attributeId}, type=${this.type}, name=${this.name}, value=${this.value})`;
     }
+
+    /**
+     * @return {boolean} - returns true if this attribute has the potential to influence the note in the argument.
+     *         That can happen in multiple ways:
+     *         1. attribute is owned by the note
+     *         2. attribute is owned by the template of the note
+     *         3. attribute is owned by some note's ancestor and is inheritable
+     */
+    isAffecting(affectedNote) {
+        const attrNote = this.getNote();
+        const owningNotes = [affectedNote, ...affectedNote.getTemplateNotes()];
+
+        for (const owningNote of owningNotes) {
+            if (owningNote.noteId === attrNote.noteId) {
+                return true;
+            }
+        }
+
+        if (this.isInheritable) {
+            for (const owningNote of owningNotes) {
+                if (owningNote.hasAncestor(attrNote)) {
+                    return true;
+                }
+            }
+        }
+
+        return false;
+    }
 }
 
 export default Attribute;
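The isAffecting() doc comment above names three ways an attribute can influence a note: direct ownership, ownership by the note's template, and an inheritable attribute on an ancestor. The standalone sketch below mimics that decision with plain objects — it is an illustration only, not the real Attribute/NoteShort classes, and the note ids are invented:

// simplified stand-in objects; in Trilium these would be NoteShort instances from the tree cache
const grandparent = {noteId: 'grandparent', templateNotes: [], parents: []};
const template = {noteId: 'template', templateNotes: [], parents: []};
const note = {noteId: 'note', templateNotes: [template], parents: [grandparent]};

function hasAncestor(note, ancestor) {
    if (note.noteId === ancestor.noteId) {
        return true;
    }

    return note.templateNotes.concat(note.parents)
        .some(parent => hasAncestor(parent, ancestor));
}

// attrNote = note owning the attribute, affectedNote = note being asked about
function isAffecting(attrNote, isInheritable, affectedNote) {
    const owningNotes = [affectedNote, ...affectedNote.templateNotes];

    if (owningNotes.some(owningNote => owningNote.noteId === attrNote.noteId)) {
        return true; // cases 1 and 2: owned by the note itself or by its template
    }

    // case 3: inheritable attribute somewhere on an ancestor
    return isInheritable && owningNotes.some(owningNote => hasAncestor(owningNote, attrNote));
}

console.log(isAffecting(note, false, note));        // true - owned directly
console.log(isAffecting(template, false, note));    // true - owned by the template
console.log(isAffecting(grandparent, true, note));  // true - inheritable attribute on an ancestor
console.log(isAffecting(grandparent, false, note)); // false - ancestor attribute that is not inheritable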
@@ -437,6 +437,35 @@ class NoteShort {
         return targets;
     }
 
+    /**
+     * @returns {NoteShort[]}
+     */
+    getTemplateNotes() {
+        const relations = this.getRelations('template');
+
+        return relations.map(rel => this.treeCache.notes[rel.value]);
+    }
+
+    hasAncestor(ancestorNote) {
+        if (this.noteId === ancestorNote.noteId) {
+            return true;
+        }
+
+        for (const templateNote of this.getTemplateNotes()) {
+            if (templateNote.hasAncestor(ancestorNote)) {
+                return true;
+            }
+        }
+
+        for (const parentNote of this.getParentNotes()) {
+            if (parentNote.hasAncestor(ancestorNote)) {console.log(parentNote);
+                return true;
+            }
+        }
+
+        return false;
+    }
+
     /**
      * Clear note's attributes cache to force fresh reload for next attribute request.
      * Cache is note instance scoped.
@@ -455,6 +484,15 @@ class NoteShort {
             .map(attributeId => this.treeCache.attributes[attributeId]);
     }
 
+    /**
+     * Return note complement which is most importantly note's content
+     *
+     * @return {Promise<NoteComplement>}
+     */
+    async getNoteComplement() {
+        return await this.treeCache.getNoteComplement(this.noteId);
+    }
+
     get toString() {
         return `Note(noteId=${this.noteId}, title=${this.title})`;
     }
@@ -1,7 +1,6 @@
 import ScriptContext from "./script_context.js";
 import server from "./server.js";
 import toastService from "./toast.js";
-import treeCache from "./tree_cache.js";
 
 async function getAndExecuteBundle(noteId, originEntity = null) {
     const bundle = await server.get('script/bundle/' + noteId);
@@ -403,6 +403,13 @@ function FrontendScriptApi(startNote, currentNote, originEntity = null, $contain
      * @method
      */
     this.waitUntilSynced = ws.waitForMaxKnownSyncId;
+
+    /**
+     * This will refresh all currently opened notes which have included note specified in the parameter
+     *
+     * @param includedNoteId - noteId of the included note
+     */
+    this.refreshIncludedNote = includedNoteId => appContext.triggerEvent('refreshIncludedNote', {noteId: includedNoteId});
 }
 
 export default FrontendScriptApi;
@@ -49,11 +49,7 @@ function setupGlobs() {
 
         let message = "Uncaught error: ";
 
-        if (string.includes("Cannot read property 'defaultView' of undefined")) {
-            // ignore this specific error which is very common but we don't know where it comes from
-            // and it seems to be harmless
-            return true;
-        } else if (string.includes("script error")) {
+        if (string.includes("script error")) {
             message += 'No details available';
         } else {
             message += [
@@ -61,8 +57,9 @@ function setupGlobs() {
                 'URL: ' + url,
                 'Line: ' + lineNo,
                 'Column: ' + columnNo,
-                'Error object: ' + JSON.stringify(error)
-            ].join(' - ');
+                'Error object: ' + JSON.stringify(error),
+                'Stack: ' + error && error.stack
+            ].join(', ');
         }
 
         ws.logError(message);
@@ -137,7 +137,7 @@ function linkContextMenu(e) {
 $(document).on('mousedown', "a[data-action='note']", goToLink);
 $(document).on('mousedown', 'div.popover-content a, div.ui-tooltip-content a', goToLink);
 $(document).on('dblclick', '.note-detail-text a', goToLink);
-$(document).on('mousedown', '.note-detail-text a', function (e) {
+$(document).on('mousedown', '.note-detail-text a:not(.reference-link)', function (e) {
     const $link = $(e.target).closest("a");
     const notePath = getNotePathFromLink($link);
 
@@ -161,6 +161,7 @@ $(document).on('mousedown', '.note-detail-text a', function (e) {
 $(document).on('mousedown', '.note-detail-book a', goToLink);
 $(document).on('mousedown', '.note-detail-render a', goToLink);
 $(document).on('mousedown', '.note-detail-text a.reference-link', goToLink);
+$(document).on('mousedown', '.note-detail-readonly-text a.reference-link', goToLink);
 $(document).on('mousedown', '.note-detail-readonly-text a', goToLink);
 $(document).on('mousedown', 'a.ck-link-actions__preview', goToLink);
 $(document).on('click', 'a.ck-link-actions__preview', e => {
@@ -54,8 +54,9 @@ export default class LoadResults {
         this.attributes.push({attributeId, sourceId});
     }
 
-    getAttributes() {
+    getAttributes(sourceId = 'none') {
         return this.attributes
+            .filter(row => row.sourceId !== sourceId)
             .map(row => this.treeCache.attributes[row.attributeId])
             .filter(attr => !!attr);
     }
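The new sourceId parameter lets a component ignore attribute changes that it caused itself (the componentId it sent with the request is recorded as the row's sourceId). A small standalone illustration of that filter — the rows and component ids here are made up, and the real method additionally maps rows to tree-cache attribute objects:

const attributeRows = [
    {attributeId: 'attr1', sourceId: 'promoted-attributes-widget'},
    {attributeId: 'attr2', sourceId: 'some-other-component'}
];

function getAttributes(rows, sourceId = 'none') {
    // rows originating from the asking component are skipped so it does not react to its own change
    return rows.filter(row => row.sourceId !== sourceId);
}

console.log(getAttributes(attributeRows, 'promoted-attributes-widget').map(row => row.attributeId));
// [ 'attr2' ] - the widget's own change is filtered out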
@@ -1,29 +0,0 @@
-export default class Mutex {
-    constructor() {
-        this.queue = [];
-        this.pending = false;
-    }
-
-    isLocked() {
-        return this.pending;
-    }
-
-    acquire() {
-        const ticket = new Promise(resolve => this.queue.push(resolve));
-
-        if (!this.pending) {
-            this.dispatchNext();
-        }
-
-        return ticket;
-    }
-
-    dispatchNext() {
-        if (this.queue.length > 0) {
-            this.pending = true;
-            this.queue.shift()(this.dispatchNext.bind(this));
-        } else {
-            this.pending = false;
-        }
-    }
-}
@@ -8,8 +8,8 @@ async function syncNow() {
         toastService.showMessage("Sync finished successfully.");
     }
     else {
-        if (result.message.length > 50) {
-            result.message = result.message.substr(0, 50);
+        if (result.message.length > 100) {
+            result.message = result.message.substr(0, 100);
         }
 
         toastService.showError("Sync failed: " + result.message);
@@ -22,7 +22,7 @@ async function resolveNotePath(notePath) {
  *
  * @return {string[]}
  */
-async function getRunPath(notePath) {
+async function getRunPath(notePath, logErrors = true) {
     utils.assertArguments(notePath);
 
     notePath = notePath.split("-")[0].trim();
@@ -66,10 +66,14 @@ async function getRunPath(notePath) {
             }
 
             if (!parents.some(p => p.noteId === parentNoteId)) {
-                console.debug(utils.now(), "Did not find parent " + parentNoteId + " for child " + childNoteId);
+                if (logErrors) {
+                    console.log(utils.now(), "Did not find parent " + parentNoteId + " for child " + childNoteId);
+                }
 
                 if (parents.length > 0) {
-                    console.debug(utils.now(), "Available parents:", parents);
+                    if (logErrors) {
+                        console.log(utils.now(), "Available parents:", parents);
+                    }
 
                     const someNotePath = getSomeNotePath(parents[0]);
 
@@ -86,7 +90,10 @@ async function getRunPath(notePath) {
                     break;
                 }
                 else {
+                    if (logErrors) {
                     console.log("No parents so no run path.");
+                    }
 
                     return;
                 }
             }
@@ -54,6 +54,10 @@ class TreeCache {
             if (attr.type === 'relation' && attr.name === 'template' && !(attr.value in existingNotes) && !noteIds.has(attr.value)) {
                 missingNoteIds.push(attr.value);
             }
+
+            if (!(attr.noteId in existingNotes) && !noteIds.has(attr.noteId)) {
+                missingNoteIds.push(attr.noteId);
+            }
         }
 
         if (missingNoteIds.length > 0) {
@@ -272,6 +276,9 @@ class TreeCache {
         return child.parentToBranch[parentNoteId];
     }
 
+    /**
+     * @return {Promise<NoteComplement>}
+     */
     async getNoteComplement(noteId) {
         if (!this.noteComplementPromises[noteId]) {
             this.noteComplementPromises[noteId] = server.get('notes/' + noteId).then(row => new NoteComplement(row));
@@ -316,6 +316,24 @@ function dynamicRequire(moduleName) {
     }
 }
 
+function timeLimit(promise, limitMs) {
+    return new Promise((res, rej) => {
+        let resolved = false;
+
+        promise.then(result => {
+            resolved = true;
+
+            res(result);
+        });
+
+        setTimeout(() => {
+            if (!resolved) {
+                rej(new Error('Process exceeded time limit ' + limitMs));
+            }
+        }, limitMs);
+    });
+}
+
 export default {
     reloadApp,
     parseDate,
@@ -355,5 +373,6 @@ export default {
     normalizeShortcut,
     copySelectionToClipboard,
     isCKEditorInitialized,
-    dynamicRequire
+    dynamicRequire,
+    timeLimit
 };
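timeLimit() races a promise against a timer; later in this diff it is used to cap sync-row processing at 5 seconds and to bound requests to the sync server. A hedged usage sketch follows — timeLimit is copied from the hunk above, while slowOperation is an invented placeholder:

function timeLimit(promise, limitMs) {
    return new Promise((res, rej) => {
        let resolved = false;

        promise.then(result => {
            resolved = true;
            res(result);
        });

        setTimeout(() => {
            if (!resolved) {
                rej(new Error('Process exceeded time limit ' + limitMs));
            }
        }, limitMs);
    });
}

// placeholder for some long-running work (e.g. processing sync rows)
const slowOperation = () => new Promise(res => setTimeout(() => res('done'), 10000));

timeLimit(slowOperation(), 5000)
    .then(result => console.log(result))
    .catch(e => console.log(e.message)); // after 5 s: "Process exceeded time limit 5000"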
@@ -25,7 +25,8 @@ function logError(message) {
     if (ws && ws.readyState === 1) {
         ws.send(JSON.stringify({
             type: 'log-error',
-            error: message
+            error: message,
+            stack: new Error().stack
         }));
     }
 }
@@ -156,7 +157,7 @@ async function consumeSyncData() {
         const nonProcessedSyncRows = allSyncRows.filter(sync => !processedSyncIds.has(sync.id));
 
         try {
-            await processSyncRows(nonProcessedSyncRows);
+            await utils.timeLimit(processSyncRows(nonProcessedSyncRows), 5000);
         }
         catch (e) {
             logError(`Encountered error ${e.message}: ${e.stack}, reloading frontend.`);
@@ -1,13 +1,11 @@
 import utils from '../services/utils.js';
-import Mutex from "../services/mutex.js";
 
 /**
  * Abstract class for all components in the Trilium's frontend.
 *
 * Contains also event implementation with following properties:
 * - event / command distribution is synchronous which among others mean that events are well ordered - event
- *   which was sent out first will also be processed first by the component since it was added to the mutex queue
- *   as the first one
+ *   which was sent out first will also be processed first by the component
 * - execution of the event / command is asynchronous - each component executes the event on its own without regard for
 *   other components.
 * - although the execution is async, we are collecting all the promises and therefore it is possible to wait until the
@@ -19,7 +17,6 @@ export default class Component {
         /** @type Component[] */
         this.children = [];
         this.initialized = Promise.resolve();
-        this.mutex = new Mutex();
     }
 
     setParent(parent) {
@@ -79,22 +76,8 @@ export default class Component {
             return false;
         }
 
-        let release;
-
-        try {
-            if (this.mutex.isLocked()) {
-                console.debug("Mutex locked for", this.constructor.name);
-            }
-
-            release = await this.mutex.acquire();
-
         await fun.call(this, data);
 
         return true;
-        } finally {
-            if (release) {
-                release();
-            }
-        }
     }
 }
@@ -35,7 +35,7 @@ const TPL = `
 
             <a class="dropdown-item sync-now-button" title="Trigger sync">
                 <span class="bx bx-refresh"></span>
-                Sync (<span id="outstanding-syncs-count">0</span>)
+                Sync now (<span id="outstanding-syncs-count">0</span>)
             </a>
 
             <a class="dropdown-item" data-trigger-command="openNewWindow">
@@ -271,13 +271,30 @@ export default class NoteDetailWidget extends TabAwareWidget {
     }
 
     async entitiesReloadedEvent({loadResults}) {
-        // FIXME: we should test what happens when the loaded note is deleted
-
         if (loadResults.isNoteContentReloaded(this.noteId, this.componentId)
             || (loadResults.isNoteReloaded(this.noteId, this.componentId) && (this.type !== await this.getWidgetType() || this.mime !== this.note.mime))) {
+
             this.handleEvent('noteTypeMimeChanged', {noteId: this.noteId});
         }
+        else {
+            const attrs = loadResults.getAttributes();
+
+            const label = attrs.find(attr =>
+                attr.type === 'label'
+                && ['readOnly', 'autoReadOnlyDisabled', 'cssClass', 'bookZoomLevel'].includes(attr.name)
+                && attr.isAffecting(this.note));
+
+            const relation = attrs.find(attr =>
+                attr.type === 'relation'
+                && ['template', 'renderNote'].includes(attr.name)
+                && attr.isAffecting(this.note));
+
+            if (label || relation) {
+                // probably incorrect event
+                // calling this.refresh() is not enough since the event needs to be propagated to children as well
+                this.handleEvent('noteTypeMimeChanged', {noteId: this.noteId});
+            }
+        }
     }
 
     beforeUnloadEvent() {
@@ -580,7 +580,17 @@ export default class NoteTreeWidget extends TabAwareWidget {
 
         const noteList = [];
 
+        const hideArchivedNotes = this.hideArchivedNotes;
+
         for (const branch of this.getChildBranches(parentNote)) {
+            if (hideArchivedNotes) {
+                const note = await branch.getNote();
+
+                if (note.hasLabel('archived')) {
+                    continue;
+                }
+            }
+
             const node = await this.prepareNode(branch);
 
             noteList.push(node);
@@ -604,6 +614,11 @@ export default class NoteTreeWidget extends TabAwareWidget {
             childBranches = childBranches.filter(branch => !imageLinks.find(rel => rel.value === branch.noteId));
         }
 
+        // we're not checking hideArchivedNotes since that would mean we need to lazy load the child notes
+        // which would seriously slow down everything.
+        // we check this flag only once user chooses to expand the parent. This has the negative consequence that
+        // note may appear as folder but not contain any children when all of them are archived
+
         return childBranches;
     }
 
@@ -732,17 +747,20 @@ export default class NoteTreeWidget extends TabAwareWidget {
     }
 
     /** @return {FancytreeNode} */
-    async getNodeFromPath(notePath, expand = false) {
+    async getNodeFromPath(notePath, expand = false, logErrors = true) {
         utils.assertArguments(notePath);
 
         const hoistedNoteId = hoistedNoteService.getHoistedNoteId();
         /** @var {FancytreeNode} */
         let parentNode = null;
 
-        const runPath = await treeService.getRunPath(notePath);
+        const runPath = await treeService.getRunPath(notePath, logErrors);
 
         if (!runPath) {
+            if (logErrors) {
             console.error("Could not find run path for notePath:", notePath);
+            }
 
             return;
         }
 
@@ -779,7 +797,10 @@ export default class NoteTreeWidget extends TabAwareWidget {
             foundChildNode = this.findChildNode(parentNode, childNoteId);
 
             if (!foundChildNode) {
+                if (logErrors) {
                 ws.logError(`Can't find node for child node of noteId=${childNoteId} for parent of noteId=${parentNode.data.noteId} and hoistedNoteId=${hoistedNoteId}, requested path is ${notePath}`);
+                }
 
                 return;
             }
         }
@@ -806,8 +827,8 @@ export default class NoteTreeWidget extends TabAwareWidget {
     }
 
     /** @return {FancytreeNode} */
-    async expandToNote(notePath) {
-        return this.getNodeFromPath(notePath, true);
+    async expandToNote(notePath, logErrors = true) {
+        return this.getNodeFromPath(notePath, true, logErrors);
     }
 
     updateNode(node) {
@@ -1008,7 +1029,7 @@ export default class NoteTreeWidget extends TabAwareWidget {
         }
 
         if (activeNotePath) {
-            let node = await this.expandToNote(activeNotePath);
+            let node = await this.expandToNote(activeNotePath, false);
 
             if (node && node.data.noteId !== activeNoteId) {
                 // if the active note has been moved elsewhere then it won't be found by the path
@@ -1024,7 +1045,7 @@ export default class NoteTreeWidget extends TabAwareWidget {
             }
             else {
                 // this is used when original note has been deleted and we want to move the focus to the note above/below
-                node = await this.expandToNote(nextNotePath);
+                node = await this.expandToNote(nextNotePath, false);
 
                 if (node) {
                     await appContext.tabManager.getActiveTabContext().setNote(nextNotePath);
@@ -149,6 +149,8 @@ export default class PromotedAttributesWidget extends TabAwareWidget {
                         cb(filtered);
                     }
                 }]);
+
+                $input.on('autocomplete:selected', e => this.promotedAttributeChanged(e))
             });
         }
         else if (definition.labelType === 'number') {
@@ -229,7 +231,7 @@ export default class PromotedAttributesWidget extends TabAwareWidget {
             .prop("title", "Remove this attribute")
             .on('click', async () => {
                 if (valueAttr.attributeId) {
-                    await server.remove("notes/" + this.noteId + "/attributes/" + valueAttr.attributeId);
+                    await server.remove("notes/" + this.noteId + "/attributes/" + valueAttr.attributeId, this.componentId);
                 }
 
                 $tr.remove();
@@ -263,8 +265,14 @@ export default class PromotedAttributesWidget extends TabAwareWidget {
             type: $attr.prop("attribute-type"),
             name: $attr.prop("attribute-name"),
             value: value
-        });
+        }, this.componentId);
 
         $attr.prop("attribute-id", result.attributeId);
     }
+
+    entitiesReloadedEvent({loadResults}) {
+        if (loadResults.getAttributes(this.componentId).find(attr => attr.isAffecting(this.note))) {
+            this.refresh();
+        }
+    }
 }
@@ -62,4 +62,12 @@ export default class AbstractTextTypeWidget extends TypeWidget {
 
         $el.text(title);
     }
+
+    refreshIncludedNote($container, noteId) {
+        if ($container) {
+            $container.find(`section[data-note-id="${noteId}"]`).each((_, el) => {
+                this.loadIncludedNote(noteId, $(el));
+            });
+        }
+    }
 }
@@ -274,4 +274,8 @@ export default class EditableTextTypeWidget extends AbstractTextTypeWidget {
 
         return notePath;
     }
+
+    async refreshIncludedNoteEvent({noteId}) {
+        this.refreshIncludedNote(this.$editor, noteId);
+    }
 }
@@ -81,4 +81,8 @@ export default class ReadOnlyTextTypeWidget extends AbstractTextTypeWidget {
             this.loadIncludedNote(noteId, $(el));
         });
     }
+
+    async refreshIncludedNoteEvent({noteId}) {
+        this.refreshIncludedNote(this.$content, noteId);
+    }
 }
@@ -68,6 +68,8 @@
 
 .note-detail-image {
     text-align: center;
+    height: 100%;
+    overflow: auto;
 }
 
 .note-detail-image-view {
@@ -23,7 +23,11 @@ async function exportBranch(req, res) {
 
     try {
         if (type === 'subtree' && (format === 'html' || format === 'markdown')) {
+            const start = Date.now();
+
             await zipExportService.exportToZip(taskContext, branch, format, res);
+
+            console.log("Export took", Date.now() - start, "ms");
         }
         else if (type === 'single') {
             await singleExportService.exportSingleNote(taskContext, branch, format, res);
@@ -51,7 +51,11 @@ async function importToBranch(req) {
     if (extension === '.tar' && options.explodeArchives) {
         note = await tarImportService.importTar(taskContext, file.buffer, parentNote);
     } else if (extension === '.zip' && options.explodeArchives) {
+        const start = Date.now();
+
         note = await zipImportService.importZip(taskContext, file.buffer, parentNote);
+
+        console.log("Import took", Date.now() - start, "ms");
     } else if (extension === '.opml' && options.explodeArchives) {
         note = await opmlImportService.importOpml(taskContext, file.buffer, parentNote);
     } else if (extension === '.enex' && options.explodeArchives) {
@@ -68,7 +68,7 @@ async function loginToProtectedSession(req) {
     const protectedSessionId = protectedSessionService.setDataKey(decryptedDataKey);
 
     // this is set here so that event handlers have access to the protected session
-    cls.namespace.set('protectedSessionId', protectedSessionId);
+    cls.set('protectedSessionId', protectedSessionId);
 
     await eventService.emit(eventService.ENTER_PROTECTED_SESSION);
 
@@ -8,34 +8,9 @@ const noteCacheService = require('../../services/note_cache/note_cache.js');
 async function getRecentChanges(req) {
     const {ancestorNoteId} = req.params;
 
-    const noteRows = await sql.getRows(
-        `
-        SELECT * FROM (
-            SELECT note_revisions.noteId,
-                   note_revisions.noteRevisionId,
-                   note_revisions.dateLastEdited AS date
-            FROM note_revisions
-            ORDER BY note_revisions.dateLastEdited DESC
-        )
-        UNION ALL SELECT * FROM (
-            SELECT
-                   notes.noteId,
-                   NULL AS noteRevisionId,
-                   dateModified AS date
-            FROM notes
-            ORDER BY dateModified DESC
-        )
-        ORDER BY date DESC`);
-
-    const recentChanges = [];
-
-    for (const noteRow of noteRows) {
-        if (!noteCacheService.isInAncestor(noteRow.noteId, ancestorNoteId)) {
-            continue;
-        }
-
-        if (noteRow.noteRevisionId) {
-            recentChanges.push(await sql.getRow(`
+    let recentChanges = [];
+
+    const noteRevisions = await sql.getRows(`
         SELECT
             notes.noteId,
             notes.isDeleted AS current_isDeleted,
@@ -44,14 +19,19 @@ async function getRecentChanges(req) {
             notes.title AS current_title,
             notes.isProtected AS current_isProtected,
             note_revisions.title,
+            note_revisions.utcDateCreated AS utcDate,
            note_revisions.dateCreated AS date
         FROM
             note_revisions
-            JOIN notes USING(noteId)
-        WHERE noteRevisionId = ?`, [noteRow.noteRevisionId]));
+            JOIN notes USING(noteId)`);
+
+    for (const noteRevision of noteRevisions) {
+        if (noteCacheService.isInAncestor(noteRevision.noteId, ancestorNoteId)) {
+            recentChanges.push(noteRevision);
         }
-        else {
-            recentChanges.push(await sql.getRow(`
+    }
+
+    const notes = await sql.getRows(`
         SELECT
             notes.noteId,
             notes.isDeleted AS current_isDeleted,
@@ -60,16 +40,22 @@ async function getRecentChanges(req) {
             notes.title AS current_title,
             notes.isProtected AS current_isProtected,
             notes.title,
-            notes.dateModified AS date
+            notes.utcDateCreated AS utcDate,
+            notes.dateCreated AS date
         FROM
-            notes
-        WHERE noteId = ?`, [noteRow.noteId]));
+            notes`);
+
+    for (const note of notes) {
+        if (noteCacheService.isInAncestor(note.noteId, ancestorNoteId)) {
+            recentChanges.push(note);
+        }
     }
 
-        if (recentChanges.length >= 200) {
-            break;
-        }
-    }
+    recentChanges.sort((a, b) => a.utcDate > b.utcDate ? -1 : 1);
+
+    recentChanges = recentChanges.slice(0, Math.min(500, recentChanges.length));
+
+    console.log(recentChanges);
 
     for (const change of recentChanges) {
         if (change.current_isProtected) {
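The rewritten handler now pulls note revisions and current notes in two plain queries, filters them by ancestor in JavaScript, and then sorts and caps the merged list. A tiny sketch of that merge step with made-up rows:

const noteRevisionRows = [
    {noteId: 'n1', title: 'older revision', utcDate: '2020-06-10 08:00:00.000Z'}
];

const noteRows = [
    {noteId: 'n1', title: 'current note', utcDate: '2020-06-14 17:30:00.000Z'},
    {noteId: 'n2', title: 'another note', utcDate: '2020-06-01 12:00:00.000Z'}
];

let recentChanges = [...noteRevisionRows, ...noteRows];

// newest first, then cap the list (the route uses 500)
recentChanges.sort((a, b) => a.utcDate > b.utcDate ? -1 : 1);
recentChanges = recentChanges.slice(0, Math.min(500, recentChanges.length));

console.log(recentChanges.map(change => change.title));
// [ 'current note', 'older revision', 'another note' ]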
@@ -55,6 +55,8 @@ async function checkSync() {
 }
 
 async function syncNow() {
+    log.info("Received request to trigger sync now.");
+
     return await syncService.sync();
 }
 
@@ -81,9 +81,12 @@ function apiRoute(method, path, routeHandler) {
 function route(method, path, middleware, routeHandler, resultHandler, transactional = true) {
     router[method](path, ...middleware, async (req, res, next) => {
         try {
+            cls.namespace.bindEmitter(req);
+            cls.namespace.bindEmitter(res);
+
             const result = await cls.init(async () => {
-                cls.namespace.set('sourceId', req.headers['trilium-source-id']);
-                cls.namespace.set('localNowDateTime', req.headers['`trilium-local-now-datetime`']);
+                cls.set('sourceId', req.headers['trilium-source-id']);
+                cls.set('localNowDateTime', req.headers['`trilium-local-now-datetime`']);
                 protectedSessionService.setProtectedSessionId(req);
 
                 if (transactional) {
|
|||||||
{ type: 'label', name: 'customRequestHandler', isDangerous: true },
|
{ type: 'label', name: 'customRequestHandler', isDangerous: true },
|
||||||
{ type: 'label', name: 'customResourceProvider', isDangerous: true },
|
{ type: 'label', name: 'customResourceProvider', isDangerous: true },
|
||||||
{ type: 'label', name: 'bookZoomLevel', isDangerous: false },
|
{ type: 'label', name: 'bookZoomLevel', isDangerous: false },
|
||||||
|
{ type: 'label', name: 'widget', isDangerous: true },
|
||||||
|
|
||||||
// relation names
|
// relation names
|
||||||
{ type: 'relation', name: 'runOnNoteView', isDangerous: true },
|
|
||||||
{ type: 'relation', name: 'runOnNoteCreation', isDangerous: true },
|
{ type: 'relation', name: 'runOnNoteCreation', isDangerous: true },
|
||||||
{ type: 'relation', name: 'runOnNoteTitleChange', isDangerous: true },
|
{ type: 'relation', name: 'runOnNoteTitleChange', isDangerous: true },
|
||||||
{ type: 'relation', name: 'runOnNoteChange', isDangerous: true },
|
{ type: 'relation', name: 'runOnNoteChange', isDangerous: true },
|
||||||
@ -122,6 +122,17 @@ function isAttributeDangerous(type, name) {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function getBuiltinAttributeNames() {
|
||||||
|
return BUILTIN_ATTRIBUTES
|
||||||
|
.map(attr => attr.name)
|
||||||
|
.concat([
|
||||||
|
'internalLink',
|
||||||
|
'imageLink',
|
||||||
|
'includeNoteLink',
|
||||||
|
'relationMapLink'
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
getNotesWithLabel,
|
getNotesWithLabel,
|
||||||
getNotesWithLabels,
|
getNotesWithLabels,
|
||||||
@ -131,5 +142,6 @@ module.exports = {
|
|||||||
createAttribute,
|
createAttribute,
|
||||||
getAttributeNames,
|
getAttributeNames,
|
||||||
isAttributeType,
|
isAttributeType,
|
||||||
isAttributeDangerous
|
isAttributeDangerous,
|
||||||
|
getBuiltinAttributeNames
|
||||||
};
|
};
|
@@ -7,7 +7,9 @@ const dataDir = require('./data_dir');
 const log = require('./log');
 const sqlInit = require('./sql_init');
 const syncMutexService = require('./sync_mutex');
+const attributeService = require('./attributes');
 const cls = require('./cls');
+const utils = require('./utils');
 const sqlite = require('sqlite');
 const sqlite3 = require('sqlite3');
 
@@ -45,7 +47,7 @@ async function copyFile(backupFile) {
 
     for (; attemptCount < COPY_ATTEMPT_COUNT && !success; attemptCount++) {
         try {
-            await sql.executeNoWrap(`VACUUM INTO '${backupFile}'`);
+            await sql.executeWithoutTransaction(`VACUUM INTO '${backupFile}'`);
 
             success = true;
         } catch (e) {
@@ -98,13 +100,20 @@ async function anonymize() {
     await db.run("UPDATE notes SET title = 'title'");
     await db.run("UPDATE note_contents SET content = 'text' WHERE content IS NOT NULL");
     await db.run("UPDATE note_revisions SET title = 'title'");
-    await db.run("UPDATE note_revision_contents SET content = 'title' WHERE content IS NOT NULL");
-    await db.run("UPDATE attributes SET name = 'name', value = 'value' WHERE type = 'label'");
-    await db.run("UPDATE attributes SET name = 'name' WHERE type = 'relation' AND name != 'template'");
+    await db.run("UPDATE note_revision_contents SET content = 'text' WHERE content IS NOT NULL");
+
+    // we want to delete all non-builtin attributes because they can contain sensitive names and values
+    // on the other hand builtin/system attrs should not contain any sensitive info
+    const builtinAttrs = attributeService.getBuiltinAttributeNames().map(name => "'" + utils.sanitizeSql(name) + "'").join(', ');
+
+    await db.run(`UPDATE attributes SET name = 'name', value = 'value' WHERE type = 'label' AND name NOT IN(${builtinAttrs})`);
+    await db.run(`UPDATE attributes SET name = 'name' WHERE type = 'relation' AND name NOT IN (${builtinAttrs})`);
     await db.run("UPDATE branches SET prefix = 'prefix' WHERE prefix IS NOT NULL");
     await db.run(`UPDATE options SET value = 'anonymized' WHERE name IN
-                   ('documentId', 'documentSecret', 'encryptedDataKey', 'passwordVerificationHash',
-                    'passwordVerificationSalt', 'passwordDerivedKeySalt', 'username', 'syncServerHost', 'syncProxy')`);
+                   ('documentId', 'documentSecret', 'encryptedDataKey',
+                    'passwordVerificationHash', 'passwordVerificationSalt',
+                    'passwordDerivedKeySalt', 'username', 'syncServerHost', 'syncProxy')
+                    AND value != ''`);
     await db.run("VACUUM");
 
     await db.close();
|
|||||||
module.exports = { buildDate:"2020-06-03T14:30:07+02:00", buildRevision: "c1fd9825aa6087b5061cdede5dba3f7f9dc62c31" };
|
module.exports = { buildDate:"2020-06-15T23:26:12+02:00", buildRevision: "9791dab97d9e86c4b02ca593198caffd1b72bbfb" };
|
||||||
|
@ -9,6 +9,14 @@ function wrap(callback) {
|
|||||||
return async () => await init(callback);
|
return async () => await init(callback);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function get(key) {
|
||||||
|
return namespace.get(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
function set(key, value) {
|
||||||
|
namespace.set(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
function getSourceId() {
|
function getSourceId() {
|
||||||
return namespace.get('sourceId');
|
return namespace.get('sourceId');
|
||||||
}
|
}
|
||||||
@ -52,6 +60,8 @@ function setEntityToCache(entityName, entityId, entity) {
|
|||||||
module.exports = {
|
module.exports = {
|
||||||
init,
|
init,
|
||||||
wrap,
|
wrap,
|
||||||
|
get,
|
||||||
|
set,
|
||||||
namespace,
|
namespace,
|
||||||
getSourceId,
|
getSourceId,
|
||||||
getLocalNowDateTime,
|
getLocalNowDateTime,
|
||||||
|
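The new get()/set() helpers are thin wrappers over the CLS namespace, so callers elsewhere in this commit can write cls.set('sourceId', ...) instead of cls.namespace.set(...). A hedged sketch of the same pattern using the cls-hooked package this project already depends on (the namespace name is made up):

const clsHooked = require('cls-hooked');

const namespace = clsHooked.createNamespace('example');

function set(key, value) {
    namespace.set(key, value);
}

function get(key) {
    return namespace.get(key);
}

// values are only visible inside a namespace run/bind context
namespace.run(() => {
    set('sourceId', 'script');
    console.log(get('sourceId')); // 'script'
});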
@@ -403,7 +403,7 @@ async function importTar(taskContext, fileBuffer, importRootNote) {
     }
 
     for (const noteId in createdNoteIds) { // now the noteIds are unique
-        await noteService.scanForLinks(await repository.getNotes(noteId));
+        await noteService.scanForLinks(await repository.getNote(noteId));
 
         if (!metaFile) {
             // if there's no meta file then the notes are created based on the order in that tar file but that
@@ -434,7 +434,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
     });
 
     for (const noteId in createdNoteIds) { // now the noteIds are unique
-        await noteService.scanForLinks(await repository.getNotes(noteId));
+        await noteService.scanForLinks(await repository.getNote(noteId));
 
         if (!metaFile) {
             // if there's no meta file then the notes are created based on the order in that tar file but that
@@ -15,11 +15,11 @@ function setDataKey(decryptedDataKey) {
 }
 
 function setProtectedSessionId(req) {
-    cls.namespace.set('protectedSessionId', req.cookies.protectedSessionId);
+    cls.set('protectedSessionId', req.cookies.protectedSessionId);
 }
 
 function getProtectedSessionId() {
-    return cls.namespace.get('protectedSessionId');
+    return cls.get('protectedSessionId');
 }
 
 function getDataKey() {
@@ -140,10 +140,6 @@ async function updateEntity(entity) {
                 await eventService.emit(entity.isDeleted ? eventService.ENTITY_DELETED : eventService.ENTITY_CHANGED, eventPayload);
             }
         }
-
-        if (entity.afterSaving) {
-            await entity.afterSaving();
-        }
     });
 }
 
@@ -9,12 +9,13 @@ const syncOptions = require('./sync_options');
 // this allows to support system proxy
 
 function exec(opts) {
+    const client = getClient(opts);
+
     // hack for cases where electron.net does not work but we don't want to set proxy
     if (opts.proxy === 'noproxy') {
         opts.proxy = null;
     }
 
-    const client = getClient(opts);
     const proxyAgent = getProxyAgent(opts);
     const parsedTargetUrl = url.parse(opts.url);
 
@@ -40,7 +41,7 @@ function exec(opts) {
             host: parsedTargetUrl.hostname,
             port: parsedTargetUrl.port,
             path: parsedTargetUrl.path,
-            timeout: opts.timeout,
+            timeout: opts.timeout, // works only for node.js client
             headers,
             agent: proxyAgent
         });
@@ -104,13 +105,15 @@ async function getImage(imageUrl) {
             host: parsedTargetUrl.hostname,
             port: parsedTargetUrl.port,
             path: parsedTargetUrl.path,
-            timeout: opts.timeout,
+            timeout: opts.timeout, // works only for node client
             headers: {},
             agent: proxyAgent
         });
 
         request.on('error', err => reject(generateError(opts, err)));
 
+        request.on('abort', err => reject(generateError(opts, err)));
+
         request.on('response', response => {
             if (![200, 201, 204].includes(response.statusCode)) {
                 reject(generateError(opts, response.statusCode + ' ' + response.statusMessage));
@@ -31,7 +31,7 @@ async function executeBundle(bundle, apiParams = {}) {
         apiParams.startNote = bundle.note;
     }
 
-    cls.namespace.set('sourceId', 'script');
+    cls.set('sourceId', 'script');
 
     // last \r\n is necessary if script contains line comment on its last line
     const script = "async function() {\r\n" + bundle.script + "\r\n}";
|
|||||||
const syncOptions = require('./sync_options');
|
const syncOptions = require('./sync_options');
|
||||||
const request = require('./request');
|
const request = require('./request');
|
||||||
const appInfo = require('./app_info');
|
const appInfo = require('./app_info');
|
||||||
|
const utils = require('./utils');
|
||||||
|
|
||||||
async function hasSyncServerSchemaAndSeed() {
|
async function hasSyncServerSchemaAndSeed() {
|
||||||
const response = await requestToSyncServer('GET', '/api/setup/status');
|
const response = await requestToSyncServer('GET', '/api/setup/status');
|
||||||
@ -43,13 +44,15 @@ async function sendSeedToSyncServer() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async function requestToSyncServer(method, path, body = null) {
|
async function requestToSyncServer(method, path, body = null) {
|
||||||
return await request.exec({
|
const timeout = await syncOptions.getSyncTimeout();
|
||||||
|
|
||||||
|
return utils.timeLimit(request.exec({
|
||||||
method,
|
method,
|
||||||
url: await syncOptions.getSyncServerHost() + path,
|
url: await syncOptions.getSyncServerHost() + path,
|
||||||
body,
|
body,
|
||||||
proxy: await syncOptions.getSyncProxy(),
|
proxy: await syncOptions.getSyncProxy(),
|
||||||
timeout: await syncOptions.getSyncTimeout()
|
timeout: timeout
|
||||||
});
|
}), timeout);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function setupSyncFromSyncServer(syncServerHost, syncProxy, username, password) {
|
async function setupSyncFromSyncServer(syncServerHost, syncProxy, username, password) {
|
||||||
|
@@ -9,7 +9,7 @@ function setDbConnection(connection) {
     dbConnection = connection;
 }
 
-[`exit`, `SIGINT`, `SIGUSR1`, `SIGUSR2`, `uncaughtException`, `SIGTERM`].forEach(eventType => {
+[`exit`, `SIGINT`, `SIGUSR1`, `SIGUSR2`, `SIGTERM`].forEach(eventType => {
     process.on(eventType, () => {
         if (dbConnection) {
             // closing connection is especially important to fold -wal file into the main DB file
@@ -33,7 +33,7 @@ async function insert(tableName, rec, replace = false) {
 
     const res = await execute(query, Object.values(rec));
 
-    return res.lastID;
+    return res.lastInsertRowid;
 }
 
 async function replace(tableName, rec) {
@@ -49,34 +49,46 @@ async function upsert(tableName, primaryKey, rec) {
 
     const columns = keys.join(", ");
 
-    let i = 0;
-
-    const questionMarks = keys.map(p => ":" + i++).join(", ");
-
-    i = 0;
-
-    const updateMarks = keys.map(key => `${key} = :${i++}`).join(", ");
+    const questionMarks = keys.map(colName => "@" + colName).join(", ");
+
+    const updateMarks = keys.map(colName => `${colName} = @${colName}`).join(", ");
 
     const query = `INSERT INTO ${tableName} (${columns}) VALUES (${questionMarks})
                    ON CONFLICT (${primaryKey}) DO UPDATE SET ${updateMarks}`;
 
-    await execute(query, Object.values(rec));
+    for (const idx in rec) {
+        if (rec[idx] === true || rec[idx] === false) {
+            rec[idx] = rec[idx] ? 1 : 0;
+        }
+    }
+
+    await execute(query, rec);
 }
 
-async function beginTransaction() {
-    return await execute("BEGIN");
+const statementCache = {};
+
+function stmt(sql) {
+    if (!(sql in statementCache)) {
+        statementCache[sql] = dbConnection.prepare(sql);
+    }
+
+    return statementCache[sql];
 }
 
-async function commit() {
-    return await execute("COMMIT");
+function beginTransaction() {
+    return stmt("BEGIN").run();
 }
 
-async function rollback() {
-    return await execute("ROLLBACK");
+function commit() {
+    return stmt("COMMIT").run();
+}
+
+function rollback() {
+    return stmt("ROLLBACK").run();
 }
 
 async function getRow(query, params = []) {
-    return await wrap(async db => db.get(query, ...params), query);
+    return wrap(() => stmt(query).get(params), query);
 }
 
 async function getRowOrNull(query, params = []) {
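
The rewritten upsert() switches from positional ":0"-style placeholders to better-sqlite3's named "@column" placeholders bound from the record object, and converts booleans to 0/1 because better-sqlite3 refuses boolean bind values. A standalone sketch of the same binding style outside this module (the table and values are invented for illustration):

    const Database = require('better-sqlite3');

    const db = new Database('/tmp/example.db');
    db.exec("CREATE TABLE IF NOT EXISTS options (name TEXT PRIMARY KEY, value TEXT, isSynced INT)");

    const rec = { name: 'theme', value: 'dark', isSynced: true };

    // better-sqlite3 cannot bind booleans, so convert them to 0/1 just like upsert() does
    for (const key in rec) {
        if (rec[key] === true || rec[key] === false) {
            rec[key] = rec[key] ? 1 : 0;
        }
    }

    // @name, @value, @isSynced are resolved from the properties of the bound object
    db.prepare(`INSERT INTO options (name, value, isSynced) VALUES (@name, @value, @isSynced)
                ON CONFLICT (name) DO UPDATE SET value = @value, isSynced = @isSynced`).run(rec);

The stmt() cache introduced here additionally means repeated queries reuse the compiled prepared statement instead of re-parsing the SQL on every call.
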
@@ -105,18 +117,25 @@ async function getManyRows(query, params) {
         const curParams = params.slice(0, Math.min(params.length, PARAM_LIMIT));
         params = params.slice(curParams.length);
 
+        const curParamsObj = {};
+
+        let j = 1;
+        for (const param of curParams) {
+            curParamsObj['param' + j++] = param;
+        }
+
         let i = 1;
-        const questionMarks = curParams.map(() => "?" + i++).join(",");
+        const questionMarks = curParams.map(() => ":param" + i++).join(",");
         const curQuery = query.replace(/\?\?\?/g, questionMarks);
 
-        results = results.concat(await getRows(curQuery, curParams));
+        results = results.concat(await getRows(curQuery, curParamsObj));
     }
 
     return results;
 }
 
 async function getRows(query, params = []) {
-    return await wrap(async db => db.all(query, ...params), query);
+    return wrap(() => stmt(query).all(params), query);
 }
 
 async function getMap(query, params = []) {
@@ -150,23 +169,29 @@ async function getColumn(query, params = []) {
 }
 
 async function execute(query, params = []) {
-    return await wrap(async db => db.run(query, ...params), query);
+    await startTransactionIfNecessary();
+
+    return wrap(() => stmt(query).run(params), query);
 }
 
-async function executeNoWrap(query, params = []) {
-    await dbConnection.run(query, ...params);
+async function executeWithoutTransaction(query, params = []) {
+    await dbConnection.run(query, params);
 }
 
 async function executeMany(query, params) {
+    await startTransactionIfNecessary();
+
     // essentially just alias
     await getManyRows(query, params);
 }
 
 async function executeScript(query) {
-    return await wrap(async db => db.exec(query), query);
+    await startTransactionIfNecessary();
+
+    return wrap(() => stmt.run(query), query);
 }
 
-async function wrap(func, query) {
+function wrap(func, query) {
     if (!dbConnection) {
         throw new Error("DB connection not initialized yet");
     }
@@ -176,7 +201,7 @@ async function wrap(func, query) {
     try {
         const startTimestamp = Date.now();
 
-        const result = await func(dbConnection);
+        const result = func(dbConnection);
 
         const milliseconds = Date.now() - startTimestamp;
         if (milliseconds >= 300) {
@@ -199,62 +224,69 @@ async function wrap(func, query) {
     }
 }
 
+// true if transaction is active globally.
+// cls.namespace.get('isTransactional') OTOH indicates active transaction in active CLS
 let transactionActive = false;
+// resolves when current transaction ends with either COMMIT or ROLLBACK
 let transactionPromise = null;
+let transactionPromiseResolve = null;
 
-async function transactional(func) {
-    if (cls.namespace.get('isInTransaction')) {
-        return await func();
+async function startTransactionIfNecessary() {
+    if (!cls.get('isTransactional')
+        || cls.get('isInTransaction')) {
+        return;
     }
 
     while (transactionActive) {
         await transactionPromise;
     }
 
-    let ret = null;
-    const thisError = new Error(); // to capture correct stack trace in case of exception
+    // first set semaphore (atomic operation and only then start transaction
 
     transactionActive = true;
-    transactionPromise = new Promise(async (resolve, reject) => {
-        try {
-            await beginTransaction();
+    transactionPromise = new Promise(res => transactionPromiseResolve = res);
+    cls.set('isInTransaction', true);
 
-            cls.namespace.set('isInTransaction', true);
+    await beginTransaction();
+}
 
-            ret = await func();
+async function transactional(func) {
+    // if the CLS is already transactional then the whole transaction is handled by higher level transactional() call
+    if (cls.get('isTransactional')) {
+        return await func();
+    }
 
-            await commit();
+    cls.set('isTransactional', true); // this signals that transaction will be needed if there's a write operation
 
-            // note that sync rows sent from this action will be sent again by scheduled periodic ping
-            require('./ws.js').sendPingToAllClients();
+    try {
+        const ret = await func();
 
-            transactionActive = false;
-            resolve();
+        if (cls.get('isInTransaction')) {
+            await commit();
 
-            setTimeout(() => require('./ws').sendPingToAllClients(), 50);
-        }
-        catch (e) {
-            if (transactionActive) {
-                log.error("Error executing transaction, executing rollback. Inner stack: " + e.stack + "\nOutside stack: " + thisError.stack);
-
-                await rollback();
-
-                transactionActive = false;
-            }
-
-            reject(e);
-        }
-        finally {
-            cls.namespace.set('isInTransaction', false);
+            // note that sync rows sent from this action will be sent again by scheduled periodic ping
+            require('./ws.js').sendPingToAllClients();
         }
-    });
 
-    if (transactionActive) {
-        await transactionPromise;
+        return ret;
     }
+    catch (e) {
+        if (cls.get('isInTransaction')) {
+            await rollback();
+        }
 
-    return ret;
+        throw e;
+    }
+    finally {
+        cls.namespace.set('isTransactional', false);
+
+        if (cls.namespace.get('isInTransaction')) {
+            transactionActive = false;
+            cls.namespace.set('isInTransaction', false);
+
+            // resolving even for rollback since this is just semaphore for allowing another write transaction to proceed
+            transactionPromiseResolve();
+        }
+    }
 }
 
 module.exports = {
     setDbConnection,
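
With this change transactional() no longer opens a transaction eagerly: it only marks the current CLS as transactional, and startTransactionIfNecessary() (called from execute(), executeMany() and executeScript()) issues BEGIN on the first write, so read-only callbacks never open a transaction at all. A usage sketch under that assumption (the statements and values are made up):

    const sql = require('./sql');

    async function switchTheme(newTheme) {
        await sql.transactional(async () => {
            // a pure read does not trigger BEGIN
            const current = await sql.getRow("SELECT value FROM options WHERE name = ?", ['theme']);

            if (current && current.value !== newTheme) {
                // the first write calls startTransactionIfNecessary() -> BEGIN;
                // COMMIT (or ROLLBACK on error) is then handled by transactional() itself
                await sql.execute("UPDATE options SET value = ? WHERE name = ?", [newTheme, 'theme']);
            }
        });
    }
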
@@ -268,7 +300,7 @@ module.exports = {
     getMap,
     getColumn,
     execute,
-    executeNoWrap,
+    executeWithoutTransaction,
     executeMany,
     executeScript,
     transactional,
@@ -1,8 +1,6 @@
 const log = require('./log');
 const dataDir = require('./data_dir');
 const fs = require('fs');
-const sqlite = require('sqlite');
-const sqlite3 = require('sqlite3');
 const resourceDir = require('./resource_dir');
 const appInfo = require('./app_info');
 const sql = require('./sql');
@@ -12,28 +10,14 @@ const optionService = require('./options');
 const port = require('./port');
 const Option = require('../entities/option');
 const TaskContext = require('./task_context.js');
+const Database = require('better-sqlite3');
 
-const dbConnection = new Promise(async (resolve, reject) => {
-    const db = await sqlite.open({
-        filename: dataDir.DOCUMENT_PATH,
-        driver: sqlite3.Database
-    });
-
-    db.run('PRAGMA journal_mode = WAL;');
-
-    sql.setDbConnection(db);
-
-    resolve();
-});
-
-let dbReadyResolve = null;
-const dbReady = new Promise(async (resolve, reject) => {
-    dbReadyResolve = resolve;
-
-    await dbConnection;
-
-    initDbConnection();
-});
+const dbConnection = new Database(dataDir.DOCUMENT_PATH);
+dbConnection.pragma('journal_mode = WAL');
+
+sql.setDbConnection(dbConnection);
+
+const dbReady = initDbConnection();
 
 async function schemaExists() {
     const tableResults = await sql.getRows("SELECT name FROM sqlite_master WHERE type='table' AND name='options'");
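
better-sqlite3 opens the database file synchronously and exposes a synchronous prepared-statement API, which is what makes the promise-based initialization above unnecessary. A minimal standalone sketch of the calls used here (the file path is made up):

    const Database = require('better-sqlite3');

    const db = new Database('/tmp/document.db');   // opens (and creates) the file synchronously
    db.pragma('journal_mode = WAL');               // same WAL setting as in this commit

    const row = db.prepare("SELECT name FROM sqlite_master WHERE type = 'table' AND name = ?")
                  .get('options');                 // .get() returns the first row or undefined

    console.log(row ? 'schema exists' : 'schema missing');
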
@@ -78,7 +62,6 @@ async function initDbConnection() {
         await require('./options_init').initStartupOptions();
 
         log.info("DB ready.");
-        dbReadyResolve();
     });
 }
 
@@ -189,7 +172,6 @@ dbReady.then(async () => {
 
 module.exports = {
     dbReady,
-    dbConnection,
     schemaExists,
     isDbInitialized,
     initDbConnection,
@@ -70,7 +70,7 @@ async function sync() {
         };
     }
     else {
-        log.info("sync failed: " + e.message + e.stack);
+        log.info("sync failed: " + e.message + "\nstack: " + e.stack);
 
         return {
             success: false,
@@ -97,7 +97,6 @@ async function doLogin() {
     const hash = utils.hmac(documentSecret, timestamp);
 
     const syncContext = { cookieJar: {} };
-
     const resp = await syncRequest(syncContext, 'POST', '/api/login/sync', {
         timestamp: timestamp,
         syncVersion: appInfo.syncVersion,
@@ -259,14 +258,18 @@ async function checkContentHash(syncContext) {
 }
 
 async function syncRequest(syncContext, method, requestPath, body) {
-    return await request.exec({
+    const timeout = await syncOptions.getSyncTimeout();
+
+    const opts = {
         method,
         url: await syncOptions.getSyncServerHost() + requestPath,
         cookieJar: syncContext.cookieJar,
-        timeout: await syncOptions.getSyncTimeout(),
+        timeout: timeout,
         body,
         proxy: proxyToggle ? await syncOptions.getSyncProxy() : null
-    });
+    };
+
+    return await utils.timeLimit(request.exec(opts), timeout);
 }
 
 const primaryKeys = {
@@ -369,7 +372,7 @@ sqlInit.dbReady.then(async () => {
     setInterval(cls.wrap(sync), 60000);
 
     // kickoff initial sync immediately
-    setTimeout(cls.wrap(sync), 1000);
+    setTimeout(cls.wrap(sync), 3000);
 
     setInterval(cls.wrap(updatePushStats), 1000);
 });
@@ -206,6 +206,14 @@ function formatDownloadTitle(filename, type, mime) {
             }
         }
 
+        if (mime === 'application/octet-stream') {
+            // we didn't find any good guess for this one, it will be better to just return
+            // the current name without fake extension. It's possible that the title still preserves to correct
+            // extension too
+
+            return filename;
+        }
+
         return filename + '.' + extensions[0];
     }
 }
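
With the new branch, a download whose MIME type is only the generic application/octet-stream keeps its original name instead of having a guessed extension appended. Illustrative calls (file names invented; the exact extension guessed for other MIME types depends on the registered mappings):

    formatDownloadTitle('backup.dat', null, 'application/octet-stream');  // now returns 'backup.dat'
    formatDownloadTitle('photo', null, 'image/jpeg');                     // still returns e.g. 'photo.jpeg'
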
@@ -233,6 +241,24 @@ function getNoteTitle(filePath, replaceUnderscoresWithSpaces, noteMeta) {
     }
 }
 
+function timeLimit(promise, limitMs) {
+    return new Promise((res, rej) => {
+        let resolved = false;
+
+        promise.then(result => {
+            resolved = true;
+
+            res(result);
+        });
+
+        setTimeout(() => {
+            if (!resolved) {
+                rej(new Error('Process exceeded time limit ' + limitMs));
+            }
+        }, limitMs);
+    });
+}
+
 module.exports = {
     randomSecureToken,
     randomString,
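
timeLimit() races the given promise against a timer and rejects if it has not resolved within limitMs; note that, as written, a rejection of the wrapped promise only surfaces as the timeout error once the timer fires. A small usage sketch (the slow promise is illustrative):

    const utils = require('./utils');

    // resolves after 10 s, so the 5 s limit below rejects first
    const slow = new Promise(res => setTimeout(() => res('done'), 10000));

    utils.timeLimit(slow, 5000)
        .then(result => console.log(result))
        .catch(err => console.log(err.message)); // "Process exceeded time limit 5000"
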
@@ -261,7 +287,8 @@ module.exports = {
     isStringNote,
     quoteRegex,
     replaceAll,
-    formatDownloadTitle,
     getNoteTitle,
     removeTextFileExtension,
+    formatDownloadTitle,
+    timeLimit
 };
@@ -36,7 +36,7 @@ function init(httpServer, sessionParser) {
         const message = JSON.parse(messageJson);
 
         if (message.type === 'log-error') {
-            log.error('JS Error: ' + message.error);
+            log.info('JS Error: ' + message.error + '\r\nStack: ' + message.stack);
         }
         else if (message.type === 'ping') {
             lastAcceptedSyncIds[ws.id] = message.lastSyncId;