Merge remote-tracking branch 'origin/better-sqlite3'

# Conflicts:
#	libraries/ckeditor/ckeditor.js
#	libraries/ckeditor/ckeditor.js.map
#	package-lock.json
#	package.json
#	src/public/app/services/utils.js
#	src/public/app/widgets/type_widgets/editable_text.js
#	src/services/utils.js
zadam committed 2020-06-18 09:03:09 +02:00 · commit a7d9870846
57 changed files with 1384 additions and 1176 deletions


@ -33,6 +33,9 @@ find $DIR/libraries -name "*.map" -type f -delete
rm -r $DIR/src/public/app
rm -r $DIR/node_modules/sqlite3/build
rm -r $DIR/node_modules/sqlite3/deps
sed -i -e 's/app\/desktop.js/app-dist\/desktop.js/g' $DIR/src/views/desktop.ejs
sed -i -e 's/app\/mobile.js/app-dist\/mobile.js/g' $DIR/src/views/mobile.ejs
sed -i -e 's/app\/setup.js/app-dist\/setup.js/g' $DIR/src/views/setup.ejs
sed -i -e 's/app\/setup.js/app-dist\/setup.js/g' $DIR/src/views/setup.ejs

Binary file not shown.


@ -24,8 +24,6 @@ app.on('window-all-closed', () => {
app.on('ready', async () => {
app.setAppUserModelId('com.github.zadam.trilium');
await sqlInit.dbConnection;
// if db is not initialized -> setup process
// if db is initialized, then we need to wait until the migration process is finished
if (await sqlInit.isDbInitialized()) {

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

package-lock.json (generated, 1724 lines changed)

File diff suppressed because it is too large


@ -2,7 +2,7 @@
"name": "trilium",
"productName": "Trilium Notes",
"description": "Trilium Notes",
"version": "0.42.3",
"version": "0.43.0-beta",
"license": "AGPL-3.0-only",
"main": "electron.js",
"bin": {
@ -25,6 +25,7 @@
"dependencies": {
"async-mutex": "0.2.2",
"axios": "0.19.2",
"better-sqlite3": "^7.1.0",
"body-parser": "1.19.0",
"cls-hooked": "4.2.2",
"commonmark": "0.29.1",
@ -81,7 +82,7 @@
},
"devDependencies": {
"cross-env": "^7.0.2",
"electron": "9.0.2",
"electron": "9.0.4",
"electron-builder": "22.7.0",
"electron-packager": "14.2.1",
"electron-rebuild": "1.11.0",


@ -1,12 +1,24 @@
const backupService = require('./services/backup');
const sqlInit = require('./services/sql_init');
require('./entities/entity_constructor');
backupService.anonymize().then(resp => {
if (resp.success) {
console.log("Anonymization failed.");
sqlInit.dbReady.then(async () => {
try {
console.log("Starting anonymization...");
const resp = await backupService.anonymize();
if (resp.success) {
console.log("Anonymized file has been saved to: " + resp.anonymizedFilePath);
process.exit(0);
} else {
console.log("Anonymization failed.");
}
}
else {
console.log("Anonymized file has been saved to: " + resp.anonymizedFilePath);
catch (e) {
console.error(e.message, e.stack);
}
process.exit(0);
process.exit(1);
});


@ -31,17 +31,6 @@ app.use((req, res, next) => {
next();
});
app.use((req, res, next) => {
cls.namespace.bindEmitter(req);
cls.namespace.bindEmitter(res);
cls.init(() => {
cls.namespace.set("Hi");
next();
});
});
app.use(bodyParser.json({limit: '500mb'}));
app.use(bodyParser.urlencoded({extended: false}));
app.use(cookieParser());


@ -105,7 +105,7 @@ class Attribute extends Entity {
// cannot be static!
updatePojo(pojo) {
delete pojo.__note;
delete pojo.__note; // FIXME: probably not necessary anymore
}
createClone(type, name, value, isInheritable) {


@ -152,10 +152,10 @@ function AttributesModel() {
attr.value = treeService.getNoteIdFromNotePath(attr.selectedPath);
}
else if (attr.type === 'label-definition') {
attr.value = attr.labelDefinition;
attr.value = JSON.stringify(attr.labelDefinition);
}
else if (attr.type === 'relation-definition') {
attr.value = attr.relationDefinition;
attr.value = JSON.stringify(attr.relationDefinition);
}
delete attr.labelValue;


@ -16,18 +16,18 @@ export async function showDialog(ancestorNoteId) {
ancestorNoteId = hoistedNoteService.getHoistedNoteId();
}
const result = await server.get('recent-changes/' + ancestorNoteId);
const recentChangesRows = await server.get('recent-changes/' + ancestorNoteId);
// preload all notes into cache
await treeCache.getNotes(result.map(r => r.noteId), true);
await treeCache.getNotes(recentChangesRows.map(r => r.noteId), true);
$content.empty();
if (result.length === 0) {
if (recentChangesRows.length === 0) {
$content.append("No changes yet ...");
}
const groupedByDate = groupByDate(result);
const groupedByDate = groupByDate(recentChangesRows);
for (const [dateDay, dayChanges] of groupedByDate) {
const $changesList = $('<ul>');
@ -95,10 +95,10 @@ export async function showDialog(ancestorNoteId) {
}
}
function groupByDate(result) {
function groupByDate(rows) {
const groupedByDate = new Map();
for (const row of result) {
for (const row of rows) {
const dateDay = row.date.substr(0, 10);
if (!groupedByDate.has(dateDay)) {


@ -23,8 +23,12 @@ class Attribute {
}
/** @returns {NoteShort} */
async getNote() {
return await this.treeCache.getNote(this.noteId);
getNote() {
return this.treeCache.notes[this.noteId];
}
get targetNoteId() { // alias
return this.type === 'relation' ? this.value : undefined;
}
get jsonValue() {
@ -43,6 +47,34 @@ class Attribute {
get toString() {
return `Attribute(attributeId=${this.attributeId}, type=${this.type}, name=${this.name}, value=${this.value})`;
}
/**
* @return {boolean} - returns true if this attribute has the potential to influence the note in the argument.
* That can happen in multiple ways:
* 1. attribute is owned by the note
* 2. attribute is owned by the template of the note
* 3. attribute is owned by some note's ancestor and is inheritable
*/
isAffecting(affectedNote) {
const attrNote = this.getNote();
const owningNotes = [affectedNote, ...affectedNote.getTemplateNotes()];
for (const owningNote of owningNotes) {
if (owningNote.noteId === attrNote.noteId) {
return true;
}
}
if (this.isInheritable) {
for (const owningNote of owningNotes) {
if (owningNote.hasAncestor(attrNote)) {
return true;
}
}
}
return false;
}
}
export default Attribute;
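
To make the new isAffecting() rule concrete, here is a minimal standalone sketch (not part of the commit) using plain objects in place of NoteShort and the tree cache; the property names ownerId, templates and ancestorIds are illustrative only:

function isAffecting(attr, note) {
    // the note itself plus its template notes can own attributes that affect it
    const owners = [note, ...note.templates];

    if (owners.some(owner => owner.id === attr.ownerId)) {
        return true; // cases 1 and 2: owned by the note or by one of its templates
    }

    if (attr.isInheritable) {
        // case 3: inheritable attribute owned by an ancestor of the note (or of a template)
        return owners.some(owner => owner.ancestorIds.includes(attr.ownerId));
    }

    return false;
}

const note = {id: 'n1', templates: [], ancestorIds: ['root', 'parent']};

console.log(isAffecting({ownerId: 'n1', isInheritable: false}, note));     // true - owned directly
console.log(isAffecting({ownerId: 'parent', isInheritable: true}, note));  // true - inherited from an ancestor
console.log(isAffecting({ownerId: 'parent', isInheritable: false}, note)); // false - not inheritable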


@ -437,6 +437,35 @@ class NoteShort {
return targets;
}
/**
* @returns {NoteShort[]}
*/
getTemplateNotes() {
const relations = this.getRelations('template');
return relations.map(rel => this.treeCache.notes[rel.value]);
}
hasAncestor(ancestorNote) {
if (this.noteId === ancestorNote.noteId) {
return true;
}
for (const templateNote of this.getTemplateNotes()) {
if (templateNote.hasAncestor(ancestorNote)) {
return true;
}
}
for (const parentNote of this.getParentNotes()) {
if (parentNote.hasAncestor(ancestorNote)) {console.log(parentNote);
return true;
}
}
return false;
}
/**
* Clear note's attributes cache to force fresh reload for next attribute request.
* Cache is note instance scoped.
@ -455,6 +484,15 @@ class NoteShort {
.map(attributeId => this.treeCache.attributes[attributeId]);
}
/**
* Return the note complement, which most importantly contains the note's content
*
* @return {Promise<NoteComplement>}
*/
async getNoteComplement() {
return await this.treeCache.getNoteComplement(this.noteId);
}
get toString() {
return `Note(noteId=${this.noteId}, title=${this.title})`;
}
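
The ancestor walk added above recurses through both template notes and parent notes. A minimal standalone sketch of the same idea, using plain objects instead of NoteShort (field names are illustrative):

function hasAncestor(note, ancestor) {
    if (note.id === ancestor.id) {
        return true;
    }

    // template notes are treated like virtual ancestors, mirroring getTemplateNotes()
    for (const template of note.templates || []) {
        if (hasAncestor(template, ancestor)) {
            return true;
        }
    }

    for (const parent of note.parents || []) {
        if (hasAncestor(parent, ancestor)) {
            return true;
        }
    }

    return false;
}

const root = {id: 'root'};
const child = {id: 'child', parents: [root]};
const grandChild = {id: 'grandChild', parents: [child]};

console.log(hasAncestor(grandChild, root)); // true
console.log(hasAncestor(root, grandChild)); // false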


@ -1,7 +1,6 @@
import ScriptContext from "./script_context.js";
import server from "./server.js";
import toastService from "./toast.js";
import treeCache from "./tree_cache.js";
async function getAndExecuteBundle(noteId, originEntity = null) {
const bundle = await server.get('script/bundle/' + noteId);
@ -77,4 +76,4 @@ export default {
getAndExecuteBundle,
executeStartupBundles,
getWidgetBundlesByParent
}
}


@ -403,6 +403,13 @@ function FrontendScriptApi(startNote, currentNote, originEntity = null, $contain
* @method
*/
this.waitUntilSynced = ws.waitForMaxKnownSyncId;
/**
* This will refresh all currently opened notes which include the note specified in the parameter
*
* @param includedNoteId - noteId of the included note
*/
this.refreshIncludedNote = includedNoteId => appContext.triggerEvent('refreshIncludedNote', {noteId: includedNoteId});
}
export default FrontendScriptApi;
export default FrontendScriptApi;


@ -49,11 +49,7 @@ function setupGlobs() {
let message = "Uncaught error: ";
if (string.includes("Cannot read property 'defaultView' of undefined")) {
// ignore this specific error which is very common but we don't know where it comes from
// and it seems to be harmless
return true;
} else if (string.includes("script error")) {
if (string.includes("script error")) {
message += 'No details available';
} else {
message += [
@ -61,8 +57,9 @@ function setupGlobs() {
'URL: ' + url,
'Line: ' + lineNo,
'Column: ' + columnNo,
'Error object: ' + JSON.stringify(error)
].join(' - ');
'Error object: ' + JSON.stringify(error),
'Stack: ' + error && error.stack
].join(', ');
}
ws.logError(message);


@ -137,7 +137,7 @@ function linkContextMenu(e) {
$(document).on('mousedown', "a[data-action='note']", goToLink);
$(document).on('mousedown', 'div.popover-content a, div.ui-tooltip-content a', goToLink);
$(document).on('dblclick', '.note-detail-text a', goToLink);
$(document).on('mousedown', '.note-detail-text a', function (e) {
$(document).on('mousedown', '.note-detail-text a:not(.reference-link)', function (e) {
const $link = $(e.target).closest("a");
const notePath = getNotePathFromLink($link);
@ -161,6 +161,7 @@ $(document).on('mousedown', '.note-detail-text a', function (e) {
$(document).on('mousedown', '.note-detail-book a', goToLink);
$(document).on('mousedown', '.note-detail-render a', goToLink);
$(document).on('mousedown', '.note-detail-text a.reference-link', goToLink);
$(document).on('mousedown', '.note-detail-readonly-text a.reference-link', goToLink);
$(document).on('mousedown', '.note-detail-readonly-text a', goToLink);
$(document).on('mousedown', 'a.ck-link-actions__preview', goToLink);
$(document).on('click', 'a.ck-link-actions__preview', e => {


@ -54,8 +54,9 @@ export default class LoadResults {
this.attributes.push({attributeId, sourceId});
}
getAttributes() {
getAttributes(sourceId = 'none') {
return this.attributes
.filter(row => row.sourceId !== sourceId)
.map(row => this.treeCache.attributes[row.attributeId])
.filter(attr => !!attr);
}


@ -1,29 +0,0 @@
export default class Mutex {
constructor() {
this.queue = [];
this.pending = false;
}
isLocked() {
return this.pending;
}
acquire() {
const ticket = new Promise(resolve => this.queue.push(resolve));
if (!this.pending) {
this.dispatchNext();
}
return ticket;
}
dispatchNext() {
if (this.queue.length > 0) {
this.pending = true;
this.queue.shift()(this.dispatchNext.bind(this));
} else {
this.pending = false;
}
}
}
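
For reference, the promise-queue mutex deleted here resolves acquire() with a release callback. A standalone usage sketch (the class is condensed from the removed file; the demo around it is illustrative):

class Mutex {
    constructor() {
        this.queue = [];
        this.pending = false;
    }

    acquire() {
        // resolves with a "release" function once the lock is free
        const ticket = new Promise(resolve => this.queue.push(resolve));

        if (!this.pending) {
            this.dispatchNext();
        }

        return ticket;
    }

    dispatchNext() {
        if (this.queue.length > 0) {
            this.pending = true;
            this.queue.shift()(this.dispatchNext.bind(this));
        } else {
            this.pending = false;
        }
    }
}

const mutex = new Mutex();

async function critical(name) {
    const release = await mutex.acquire();
    try {
        console.log(name, 'entered the critical section');
        await new Promise(resolve => setTimeout(resolve, 100));
    } finally {
        release(); // hands the lock to the next queued caller
    }
}

critical('first');
critical('second'); // only enters after 'first' has released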


@ -8,8 +8,8 @@ async function syncNow() {
toastService.showMessage("Sync finished successfully.");
}
else {
if (result.message.length > 50) {
result.message = result.message.substr(0, 50);
if (result.message.length > 100) {
result.message = result.message.substr(0, 100);
}
toastService.showError("Sync failed: " + result.message);
@ -25,4 +25,4 @@ async function forceNoteSync(noteId) {
export default {
syncNow,
forceNoteSync
};
};


@ -22,7 +22,7 @@ async function resolveNotePath(notePath) {
*
* @return {string[]}
*/
async function getRunPath(notePath) {
async function getRunPath(notePath, logErrors = true) {
utils.assertArguments(notePath);
notePath = notePath.split("-")[0].trim();
@ -66,10 +66,14 @@ async function getRunPath(notePath) {
}
if (!parents.some(p => p.noteId === parentNoteId)) {
console.debug(utils.now(), "Did not find parent " + parentNoteId + " for child " + childNoteId);
if (logErrors) {
console.log(utils.now(), "Did not find parent " + parentNoteId + " for child " + childNoteId);
}
if (parents.length > 0) {
console.debug(utils.now(), "Available parents:", parents);
if (logErrors) {
console.log(utils.now(), "Available parents:", parents);
}
const someNotePath = getSomeNotePath(parents[0]);
@ -86,7 +90,10 @@ async function getRunPath(notePath) {
break;
}
else {
console.log("No parents so no run path.");
if (logErrors) {
console.log("No parents so no run path.");
}
return;
}
}


@ -54,6 +54,10 @@ class TreeCache {
if (attr.type === 'relation' && attr.name === 'template' && !(attr.value in existingNotes) && !noteIds.has(attr.value)) {
missingNoteIds.push(attr.value);
}
if (!(attr.noteId in existingNotes) && !noteIds.has(attr.noteId)) {
missingNoteIds.push(attr.noteId);
}
}
if (missingNoteIds.length > 0) {
@ -272,6 +276,9 @@ class TreeCache {
return child.parentToBranch[parentNoteId];
}
/**
* @return {Promise<NoteComplement>}
*/
async getNoteComplement(noteId) {
if (!this.noteComplementPromises[noteId]) {
this.noteComplementPromises[noteId] = server.get('notes/' + noteId).then(row => new NoteComplement(row));
@ -283,4 +290,4 @@ class TreeCache {
const treeCache = new TreeCache();
export default treeCache;
export default treeCache;
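
getNoteComplement() above memoizes the request promise itself, so concurrent callers share one in-flight request. A minimal sketch of that pattern (loadFromServer is an illustrative stand-in for server.get):

const promiseCache = {};

function loadFromServer(noteId) {
    // stand-in for server.get('notes/' + noteId)
    return new Promise(resolve => setTimeout(() => resolve({noteId, content: '...'}), 50));
}

function getNoteComplement(noteId) {
    if (!(noteId in promiseCache)) {
        promiseCache[noteId] = loadFromServer(noteId);
    }

    return promiseCache[noteId]; // concurrent callers get the same in-flight promise
}

// both calls trigger only a single "request"
Promise.all([getNoteComplement('abc'), getNoteComplement('abc')])
    .then(([a, b]) => console.log(a === b)); // true - same resolved object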


@ -316,6 +316,24 @@ function dynamicRequire(moduleName) {
}
}
function timeLimit(promise, limitMs) {
return new Promise((res, rej) => {
let resolved = false;
promise.then(result => {
resolved = true;
res(result);
});
setTimeout(() => {
if (!resolved) {
rej(new Error('Process exceeded time limit ' + limitMs));
}
}, limitMs);
});
}
export default {
reloadApp,
parseDate,
@ -355,5 +373,6 @@ export default {
normalizeShortcut,
copySelectionToClipboard,
isCKEditorInitialized,
dynamicRequire
dynamicRequire,
timeLimit
};
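
The timeLimit() helper added above races a promise against a timer and rejects once the limit passes; note that it does not cancel the underlying work, and a rejection of the wrapped promise is not forwarded. A hypothetical usage sketch with the same function body:

function timeLimit(promise, limitMs) {
    return new Promise((res, rej) => {
        let resolved = false;

        promise.then(result => {
            resolved = true;
            res(result);
        });

        setTimeout(() => {
            if (!resolved) {
                rej(new Error('Process exceeded time limit ' + limitMs));
            }
        }, limitMs);
    });
}

const slowOperation = new Promise(resolve => setTimeout(() => resolve('done'), 10000));

timeLimit(slowOperation, 5000)
    .then(result => console.log(result))
    .catch(err => console.error(err.message)); // "Process exceeded time limit 5000" after 5s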


@ -25,7 +25,8 @@ function logError(message) {
if (ws && ws.readyState === 1) {
ws.send(JSON.stringify({
type: 'log-error',
error: message
error: message,
stack: new Error().stack
}));
}
}
@ -156,7 +157,7 @@ async function consumeSyncData() {
const nonProcessedSyncRows = allSyncRows.filter(sync => !processedSyncIds.has(sync.id));
try {
await processSyncRows(nonProcessedSyncRows);
await utils.timeLimit(processSyncRows(nonProcessedSyncRows), 5000);
}
catch (e) {
logError(`Encountered error ${e.message}: ${e.stack}, reloading frontend.`);


@ -1,13 +1,11 @@
import utils from '../services/utils.js';
import Mutex from "../services/mutex.js";
/**
* Abstract class for all components in the Trilium's frontend.
*
* Also contains an event implementation with the following properties:
* - event / command distribution is synchronous, which among other things means that events are well ordered - an event
* which was sent out first will also be processed first by the component since it was added to the mutex queue
* as the first one
* which was sent out first will also be processed first by the component
* - execution of the event / command is asynchronous - each component executes the event on its own without regard for
* other components.
* - although the execution is async, we are collecting all the promises and therefore it is possible to wait until the
@ -19,7 +17,6 @@ export default class Component {
/** @type Component[] */
this.children = [];
this.initialized = Promise.resolve();
this.mutex = new Mutex();
}
setParent(parent) {
@ -79,22 +76,8 @@ export default class Component {
return false;
}
let release;
await fun.call(this, data);
try {
if (this.mutex.isLocked()) {
console.debug("Mutex locked for", this.constructor.name);
}
release = await this.mutex.acquire();
await fun.call(this, data);
return true;
} finally {
if (release) {
release();
}
}
return true;
}
}
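
The event mechanism described in the doc comment above distributes events synchronously (so they stay well ordered) while each handler runs asynchronously, and callers can still await the collected promises. A simplified illustrative sketch, not the actual Component class:

class Component {
    constructor(name) {
        this.name = name;
    }

    async noteSwitchedEvent({noteId}) {
        await new Promise(resolve => setTimeout(resolve, 10)); // simulated async work
        console.log(this.name, 'handled noteSwitched for', noteId);
    }

    handleEvent(name, data) {
        const fun = this[name + 'Event'];

        return fun ? fun.call(this, data) : Promise.resolve();
    }
}

const components = [new Component('tree'), new Component('detail')];

// distribution is synchronous and ordered; execution of each handler is async
const promises = components.map(c => c.handleEvent('noteSwitched', {noteId: 'abc'}));

Promise.all(promises).then(() => console.log('all handlers finished'));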


@ -35,7 +35,7 @@ const TPL = `
<a class="dropdown-item sync-now-button" title="Trigger sync">
<span class="bx bx-refresh"></span>
Sync (<span id="outstanding-syncs-count">0</span>)
Sync now (<span id="outstanding-syncs-count">0</span>)
</a>
<a class="dropdown-item" data-trigger-command="openNewWindow">
@ -116,4 +116,4 @@ export default class GlobalMenuWidget extends BasicWidget {
return this.$widget;
}
}
}


@ -271,13 +271,30 @@ export default class NoteDetailWidget extends TabAwareWidget {
}
async entitiesReloadedEvent({loadResults}) {
// FIXME: we should test what happens when the loaded note is deleted
if (loadResults.isNoteContentReloaded(this.noteId, this.componentId)
|| (loadResults.isNoteReloaded(this.noteId, this.componentId) && (this.type !== await this.getWidgetType() || this.mime !== this.note.mime))) {
this.handleEvent('noteTypeMimeChanged', {noteId: this.noteId});
}
else {
const attrs = loadResults.getAttributes();
const label = attrs.find(attr =>
attr.type === 'label'
&& ['readOnly', 'autoReadOnlyDisabled', 'cssClass', 'bookZoomLevel'].includes(attr.name)
&& attr.isAffecting(this.note));
const relation = attrs.find(attr =>
attr.type === 'relation'
&& ['template', 'renderNote'].includes(attr.name)
&& attr.isAffecting(this.note));
if (label || relation) {
// probably incorrect event
// calling this.refresh() is not enough since the event needs to be propagated to children as well
this.handleEvent('noteTypeMimeChanged', {noteId: this.noteId});
}
}
}
beforeUnloadEvent() {


@ -100,4 +100,4 @@ export default class NoteTitleWidget extends TabAwareWidget {
beforeUnloadEvent() {
this.spacedUpdate.updateNowIfNecessary();
}
}
}


@ -580,7 +580,17 @@ export default class NoteTreeWidget extends TabAwareWidget {
const noteList = [];
const hideArchivedNotes = this.hideArchivedNotes;
for (const branch of this.getChildBranches(parentNote)) {
if (hideArchivedNotes) {
const note = await branch.getNote();
if (note.hasLabel('archived')) {
continue;
}
}
const node = await this.prepareNode(branch);
noteList.push(node);
@ -604,6 +614,11 @@ export default class NoteTreeWidget extends TabAwareWidget {
childBranches = childBranches.filter(branch => !imageLinks.find(rel => rel.value === branch.noteId));
}
// we're not checking hideArchivedNotes since that would mean we need to lazy load the child notes,
// which would seriously slow down everything.
// we check this flag only once the user chooses to expand the parent. This has the negative consequence that
// a note may appear as a folder but not contain any children when all of them are archived
return childBranches;
}
@ -732,17 +747,20 @@ export default class NoteTreeWidget extends TabAwareWidget {
}
/** @return {FancytreeNode} */
async getNodeFromPath(notePath, expand = false) {
async getNodeFromPath(notePath, expand = false, logErrors = true) {
utils.assertArguments(notePath);
const hoistedNoteId = hoistedNoteService.getHoistedNoteId();
/** @var {FancytreeNode} */
let parentNode = null;
const runPath = await treeService.getRunPath(notePath);
const runPath = await treeService.getRunPath(notePath, logErrors);
if (!runPath) {
console.error("Could not find run path for notePath:", notePath);
if (logErrors) {
console.error("Could not find run path for notePath:", notePath);
}
return;
}
@ -779,7 +797,10 @@ export default class NoteTreeWidget extends TabAwareWidget {
foundChildNode = this.findChildNode(parentNode, childNoteId);
if (!foundChildNode) {
ws.logError(`Can't find node for child node of noteId=${childNoteId} for parent of noteId=${parentNode.data.noteId} and hoistedNoteId=${hoistedNoteId}, requested path is ${notePath}`);
if (logErrors) {
ws.logError(`Can't find node for child node of noteId=${childNoteId} for parent of noteId=${parentNode.data.noteId} and hoistedNoteId=${hoistedNoteId}, requested path is ${notePath}`);
}
return;
}
}
@ -806,8 +827,8 @@ export default class NoteTreeWidget extends TabAwareWidget {
}
/** @return {FancytreeNode} */
async expandToNote(notePath) {
return this.getNodeFromPath(notePath, true);
async expandToNote(notePath, logErrors = true) {
return this.getNodeFromPath(notePath, true, logErrors);
}
updateNode(node) {
@ -1008,7 +1029,7 @@ export default class NoteTreeWidget extends TabAwareWidget {
}
if (activeNotePath) {
let node = await this.expandToNote(activeNotePath);
let node = await this.expandToNote(activeNotePath, false);
if (node && node.data.noteId !== activeNoteId) {
// if the active note has been moved elsewhere then it won't be found by the path
@ -1024,7 +1045,7 @@ export default class NoteTreeWidget extends TabAwareWidget {
}
else {
// this is used when original note has been deleted and we want to move the focus to the note above/below
node = await this.expandToNote(nextNotePath);
node = await this.expandToNote(nextNotePath, false);
if (node) {
await appContext.tabManager.getActiveTabContext().setNote(nextNotePath);


@ -149,6 +149,8 @@ export default class PromotedAttributesWidget extends TabAwareWidget {
cb(filtered);
}
}]);
$input.on('autocomplete:selected', e => this.promotedAttributeChanged(e))
});
}
else if (definition.labelType === 'number') {
@ -229,7 +231,7 @@ export default class PromotedAttributesWidget extends TabAwareWidget {
.prop("title", "Remove this attribute")
.on('click', async () => {
if (valueAttr.attributeId) {
await server.remove("notes/" + this.noteId + "/attributes/" + valueAttr.attributeId);
await server.remove("notes/" + this.noteId + "/attributes/" + valueAttr.attributeId, this.componentId);
}
$tr.remove();
@ -263,8 +265,14 @@ export default class PromotedAttributesWidget extends TabAwareWidget {
type: $attr.prop("attribute-type"),
name: $attr.prop("attribute-name"),
value: value
});
}, this.componentId);
$attr.prop("attribute-id", result.attributeId);
}
entitiesReloadedEvent({loadResults}) {
if (loadResults.getAttributes(this.componentId).find(attr => attr.isAffecting(this.note))) {
this.refresh();
}
}
}


@ -62,4 +62,12 @@ export default class AbstractTextTypeWidget extends TypeWidget {
$el.text(title);
}
}
refreshIncludedNote($container, noteId) {
if ($container) {
$container.find(`section[data-note-id="${noteId}"]`).each((_, el) => {
this.loadIncludedNote(noteId, $(el));
});
}
}
}


@ -274,4 +274,8 @@ export default class EditableTextTypeWidget extends AbstractTextTypeWidget {
return notePath;
}
async refreshIncludedNoteEvent({noteId}) {
this.refreshIncludedNote(this.$editor, noteId);
}
}


@ -81,4 +81,8 @@ export default class ReadOnlyTextTypeWidget extends AbstractTextTypeWidget {
this.loadIncludedNote(noteId, $(el));
});
}
async refreshIncludedNoteEvent({noteId}) {
this.refreshIncludedNote(this.$content, noteId);
}
}


@ -68,6 +68,8 @@
.note-detail-image {
text-align: center;
height: 100%;
overflow: auto;
}
.note-detail-image-view {
@ -92,4 +94,4 @@
max-height: 300px;
overflow: auto;
margin: 10px;
}
}


@ -23,7 +23,11 @@ async function exportBranch(req, res) {
try {
if (type === 'subtree' && (format === 'html' || format === 'markdown')) {
const start = Date.now();
await zipExportService.exportToZip(taskContext, branch, format, res);
console.log("Export took", Date.now() - start, "ms");
}
else if (type === 'single') {
await singleExportService.exportSingleNote(taskContext, branch, format, res);


@ -51,7 +51,11 @@ async function importToBranch(req) {
if (extension === '.tar' && options.explodeArchives) {
note = await tarImportService.importTar(taskContext, file.buffer, parentNote);
} else if (extension === '.zip' && options.explodeArchives) {
const start = Date.now();
note = await zipImportService.importZip(taskContext, file.buffer, parentNote);
console.log("Import took", Date.now() - start, "ms");
} else if (extension === '.opml' && options.explodeArchives) {
note = await opmlImportService.importOpml(taskContext, file.buffer, parentNote);
} else if (extension === '.enex' && options.explodeArchives) {


@ -68,7 +68,7 @@ async function loginToProtectedSession(req) {
const protectedSessionId = protectedSessionService.setDataKey(decryptedDataKey);
// this is set here so that event handlers have access to the protected session
cls.namespace.set('protectedSessionId', protectedSessionId);
cls.set('protectedSessionId', protectedSessionId);
await eventService.emit(eventService.ENTER_PROTECTED_SESSION);


@ -8,69 +8,55 @@ const noteCacheService = require('../../services/note_cache/note_cache.js');
async function getRecentChanges(req) {
const {ancestorNoteId} = req.params;
const noteRows = await sql.getRows(
`
SELECT * FROM (
SELECT note_revisions.noteId,
note_revisions.noteRevisionId,
note_revisions.dateLastEdited AS date
FROM note_revisions
ORDER BY note_revisions.dateLastEdited DESC
)
UNION ALL SELECT * FROM (
SELECT
notes.noteId,
NULL AS noteRevisionId,
dateModified AS date
FROM notes
ORDER BY dateModified DESC
)
ORDER BY date DESC`);
let recentChanges = [];
const recentChanges = [];
const noteRevisions = await sql.getRows(`
SELECT
notes.noteId,
notes.isDeleted AS current_isDeleted,
notes.deleteId AS current_deleteId,
notes.isErased AS current_isErased,
notes.title AS current_title,
notes.isProtected AS current_isProtected,
note_revisions.title,
note_revisions.utcDateCreated AS utcDate,
note_revisions.dateCreated AS date
FROM
note_revisions
JOIN notes USING(noteId)`);
for (const noteRow of noteRows) {
if (!noteCacheService.isInAncestor(noteRow.noteId, ancestorNoteId)) {
continue;
}
if (noteRow.noteRevisionId) {
recentChanges.push(await sql.getRow(`
SELECT
notes.noteId,
notes.isDeleted AS current_isDeleted,
notes.deleteId AS current_deleteId,
notes.isErased AS current_isErased,
notes.title AS current_title,
notes.isProtected AS current_isProtected,
note_revisions.title,
note_revisions.dateCreated AS date
FROM
note_revisions
JOIN notes USING(noteId)
WHERE noteRevisionId = ?`, [noteRow.noteRevisionId]));
}
else {
recentChanges.push(await sql.getRow(`
SELECT
notes.noteId,
notes.isDeleted AS current_isDeleted,
notes.deleteId AS current_deleteId,
notes.isErased AS current_isErased,
notes.title AS current_title,
notes.isProtected AS current_isProtected,
notes.title,
notes.dateModified AS date
FROM
notes
WHERE noteId = ?`, [noteRow.noteId]));
}
if (recentChanges.length >= 200) {
break;
for (const noteRevision of noteRevisions) {
if (noteCacheService.isInAncestor(noteRevision.noteId, ancestorNoteId)) {
recentChanges.push(noteRevision);
}
}
const notes = await sql.getRows(`
SELECT
notes.noteId,
notes.isDeleted AS current_isDeleted,
notes.deleteId AS current_deleteId,
notes.isErased AS current_isErased,
notes.title AS current_title,
notes.isProtected AS current_isProtected,
notes.title,
notes.utcDateCreated AS utcDate,
notes.dateCreated AS date
FROM
notes`);
for (const note of notes) {
if (noteCacheService.isInAncestor(note.noteId, ancestorNoteId)) {
recentChanges.push(note);
}
}
recentChanges.sort((a, b) => a.utcDate > b.utcDate ? -1 : 1);
recentChanges = recentChanges.slice(0, Math.min(500, recentChanges.length));
console.log(recentChanges);
for (const change of recentChanges) {
if (change.current_isProtected) {
if (protectedSessionService.isProtectedSessionAvailable()) {


@ -55,6 +55,8 @@ async function checkSync() {
}
async function syncNow() {
log.info("Received request to trigger sync now.");
return await syncService.sync();
}
@ -168,4 +170,4 @@ module.exports = {
getStats,
syncFinished,
queueSector
};
};


@ -81,9 +81,12 @@ function apiRoute(method, path, routeHandler) {
function route(method, path, middleware, routeHandler, resultHandler, transactional = true) {
router[method](path, ...middleware, async (req, res, next) => {
try {
cls.namespace.bindEmitter(req);
cls.namespace.bindEmitter(res);
const result = await cls.init(async () => {
cls.namespace.set('sourceId', req.headers['trilium-source-id']);
cls.namespace.set('localNowDateTime', req.headers['`trilium-local-now-datetime`']);
cls.set('sourceId', req.headers['trilium-source-id']);
cls.set('localNowDateTime', req.headers['`trilium-local-now-datetime`']);
protectedSessionService.setProtectedSessionId(req);
if (transactional) {


@ -27,9 +27,9 @@ const BUILTIN_ATTRIBUTES = [
{ type: 'label', name: 'customRequestHandler', isDangerous: true },
{ type: 'label', name: 'customResourceProvider', isDangerous: true },
{ type: 'label', name: 'bookZoomLevel', isDangerous: false },
{ type: 'label', name: 'widget', isDangerous: true },
// relation names
{ type: 'relation', name: 'runOnNoteView', isDangerous: true },
{ type: 'relation', name: 'runOnNoteCreation', isDangerous: true },
{ type: 'relation', name: 'runOnNoteTitleChange', isDangerous: true },
{ type: 'relation', name: 'runOnNoteChange', isDangerous: true },
@ -115,13 +115,24 @@ function isAttributeType(type) {
}
function isAttributeDangerous(type, name) {
return BUILTIN_ATTRIBUTES.some(attr =>
attr.type === attr.type &&
return BUILTIN_ATTRIBUTES.some(attr =>
attr.type === attr.type &&
attr.name.toLowerCase() === name.trim().toLowerCase() &&
attr.isDangerous
);
}
function getBuiltinAttributeNames() {
return BUILTIN_ATTRIBUTES
.map(attr => attr.name)
.concat([
'internalLink',
'imageLink',
'includeNoteLink',
'relationMapLink'
]);
}
module.exports = {
getNotesWithLabel,
getNotesWithLabels,
@ -131,5 +142,6 @@ module.exports = {
createAttribute,
getAttributeNames,
isAttributeType,
isAttributeDangerous
};
isAttributeDangerous,
getBuiltinAttributeNames
};
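
A minimal sketch of the kind of check isAttributeDangerous() performs: case-insensitive name matching against a builtin list. The list below is an illustrative subset only, and the type argument is compared explicitly here:

const BUILTIN = [
    {type: 'label', name: 'disableVersioning', isDangerous: false},
    {type: 'label', name: 'customRequestHandler', isDangerous: true},
    {type: 'relation', name: 'runOnNoteChange', isDangerous: true}
];

function isAttributeDangerous(type, name) {
    return BUILTIN.some(attr =>
        attr.type === type &&
        attr.name.toLowerCase() === name.trim().toLowerCase() &&
        attr.isDangerous);
}

console.log(isAttributeDangerous('label', ' customRequestHandler ')); // true
console.log(isAttributeDangerous('label', 'disableVersioning'));      // false - builtin but not dangerous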


@ -7,7 +7,9 @@ const dataDir = require('./data_dir');
const log = require('./log');
const sqlInit = require('./sql_init');
const syncMutexService = require('./sync_mutex');
const attributeService = require('./attributes');
const cls = require('./cls');
const utils = require('./utils');
const sqlite = require('sqlite');
const sqlite3 = require('sqlite3');
@ -45,7 +47,7 @@ async function copyFile(backupFile) {
for (; attemptCount < COPY_ATTEMPT_COUNT && !success; attemptCount++) {
try {
await sql.executeNoWrap(`VACUUM INTO '${backupFile}'`);
await sql.executeWithoutTransaction(`VACUUM INTO '${backupFile}'`);
success = true;
} catch (e) {
@ -98,13 +100,20 @@ async function anonymize() {
await db.run("UPDATE notes SET title = 'title'");
await db.run("UPDATE note_contents SET content = 'text' WHERE content IS NOT NULL");
await db.run("UPDATE note_revisions SET title = 'title'");
await db.run("UPDATE note_revision_contents SET content = 'title' WHERE content IS NOT NULL");
await db.run("UPDATE attributes SET name = 'name', value = 'value' WHERE type = 'label'");
await db.run("UPDATE attributes SET name = 'name' WHERE type = 'relation' AND name != 'template'");
await db.run("UPDATE note_revision_contents SET content = 'text' WHERE content IS NOT NULL");
// we want to delete all non-builtin attributes because they can contain sensitive names and values
// on the other hand builtin/system attrs should not contain any sensitive info
const builtinAttrs = attributeService.getBuiltinAttributeNames().map(name => "'" + utils.sanitizeSql(name) + "'").join(', ');
await db.run(`UPDATE attributes SET name = 'name', value = 'value' WHERE type = 'label' AND name NOT IN(${builtinAttrs})`);
await db.run(`UPDATE attributes SET name = 'name' WHERE type = 'relation' AND name NOT IN (${builtinAttrs})`);
await db.run("UPDATE branches SET prefix = 'prefix' WHERE prefix IS NOT NULL");
await db.run(`UPDATE options SET value = 'anonymized' WHERE name IN
('documentId', 'documentSecret', 'encryptedDataKey', 'passwordVerificationHash',
'passwordVerificationSalt', 'passwordDerivedKeySalt', 'username', 'syncServerHost', 'syncProxy')`);
('documentId', 'documentSecret', 'encryptedDataKey',
'passwordVerificationHash', 'passwordVerificationSalt',
'passwordDerivedKeySalt', 'username', 'syncServerHost', 'syncProxy')
AND value != ''`);
await db.run("VACUUM");
await db.close();
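
The anonymization above builds the NOT IN list by inlining sanitized builtin names into the SQL string. Purely as an illustration, such a list can also be supplied through bound parameters; everything below (names and query) is a reduced hypothetical example, not code from the commit:

const builtinNames = ['archived', 'cssClass', 'iconClass']; // illustrative subset only

// one placeholder per builtin name, e.g. (?, ?, ?)
const placeholders = builtinNames.map(() => '?').join(', ');

const query = `UPDATE attributes SET name = 'name', value = 'value'
               WHERE type = 'label' AND name NOT IN (${placeholders})`;

console.log(query);
// the names are then passed as bind parameters, e.g. stmt.run(builtinNames),
// so they never need to be escaped or sanitized as SQL text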


@ -1 +1 @@
module.exports = { buildDate:"2020-06-03T14:30:07+02:00", buildRevision: "c1fd9825aa6087b5061cdede5dba3f7f9dc62c31" };
module.exports = { buildDate:"2020-06-15T23:26:12+02:00", buildRevision: "9791dab97d9e86c4b02ca593198caffd1b72bbfb" };


@ -9,6 +9,14 @@ function wrap(callback) {
return async () => await init(callback);
}
function get(key) {
return namespace.get(key);
}
function set(key, value) {
namespace.set(key, value);
}
function getSourceId() {
return namespace.get('sourceId');
}
@ -52,6 +60,8 @@ function setEntityToCache(entityName, entityId, entity) {
module.exports = {
init,
wrap,
get,
set,
namespace,
getSourceId,
getLocalNowDateTime,
@ -62,4 +72,4 @@ module.exports = {
addSyncRow,
getEntityFromCache,
setEntityToCache
};
};
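
The new cls.get()/cls.set() helpers above are thin wrappers around a cls-hooked namespace, so callers no longer touch cls.namespace directly. A small standalone sketch of the underlying cls-hooked API (the namespace name is illustrative; assumes cls-hooked is installed):

const clsHooked = require('cls-hooked');

const namespace = clsHooked.createNamespace('example');

function get(key) {
    return namespace.get(key);
}

function set(key, value) {
    namespace.set(key, value);
}

namespace.run(() => {
    set('sourceId', 'script');

    setTimeout(() => {
        // the continuation-local context follows the async callback
        console.log(get('sourceId')); // 'script'
    }, 10);
});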


@ -47,7 +47,7 @@ async function importTar(taskContext, fileBuffer, importRootNote) {
return noteIdMap[origNoteId];
}
function getMeta(filePath) {
if (!metaFile) {
return {};
@ -403,7 +403,7 @@ async function importTar(taskContext, fileBuffer, importRootNote) {
}
for (const noteId in createdNoteIds) { // now the noteIds are unique
await noteService.scanForLinks(await repository.getNotes(noteId));
await noteService.scanForLinks(await repository.getNote(noteId));
if (!metaFile) {
// if there's no meta file then the notes are created based on the order in that tar file but that
@ -437,4 +437,4 @@ async function importTar(taskContext, fileBuffer, importRootNote) {
module.exports = {
importTar
};
};


@ -434,7 +434,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
});
for (const noteId in createdNoteIds) { // now the noteIds are unique
await noteService.scanForLinks(await repository.getNotes(noteId));
await noteService.scanForLinks(await repository.getNote(noteId));
if (!metaFile) {
// if there's no meta file then the notes are created based on the order in that tar file but that
@ -461,4 +461,4 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
module.exports = {
importZip
};
};


@ -15,11 +15,11 @@ function setDataKey(decryptedDataKey) {
}
function setProtectedSessionId(req) {
cls.namespace.set('protectedSessionId', req.cookies.protectedSessionId);
cls.set('protectedSessionId', req.cookies.protectedSessionId);
}
function getProtectedSessionId() {
return cls.namespace.get('protectedSessionId');
return cls.get('protectedSessionId');
}
function getDataKey() {
@ -63,4 +63,4 @@ module.exports = {
decryptString,
decryptNotes,
setProtectedSessionId
};
};


@ -140,10 +140,6 @@ async function updateEntity(entity) {
await eventService.emit(entity.isDeleted ? eventService.ENTITY_DELETED : eventService.ENTITY_CHANGED, eventPayload);
}
}
if (entity.afterSaving) {
await entity.afterSaving();
}
});
}
@ -159,4 +155,4 @@ module.exports = {
getOption,
updateEntity,
setEntityConstructor
};
};


@ -9,12 +9,13 @@ const syncOptions = require('./sync_options');
// this allows to support system proxy
function exec(opts) {
const client = getClient(opts);
// hack for cases where electron.net does not work but we don't want to set proxy
if (opts.proxy === 'noproxy') {
opts.proxy = null;
}
const client = getClient(opts);
const proxyAgent = getProxyAgent(opts);
const parsedTargetUrl = url.parse(opts.url);
@ -40,7 +41,7 @@ function exec(opts) {
host: parsedTargetUrl.hostname,
port: parsedTargetUrl.port,
path: parsedTargetUrl.path,
timeout: opts.timeout,
timeout: opts.timeout, // works only for node.js client
headers,
agent: proxyAgent
});
@ -104,13 +105,15 @@ async function getImage(imageUrl) {
host: parsedTargetUrl.hostname,
port: parsedTargetUrl.port,
path: parsedTargetUrl.path,
timeout: opts.timeout,
timeout: opts.timeout, // works only for node client
headers: {},
agent: proxyAgent
});
request.on('error', err => reject(generateError(opts, err)));
request.on('abort', err => reject(generateError(opts, err)));
request.on('response', response => {
if (![200, 201, 204].includes(response.statusCode)) {
reject(generateError(opts, response.statusCode + ' ' + response.statusMessage));
@ -173,4 +176,4 @@ function generateError(opts, message) {
module.exports = {
exec,
getImage
};
};


@ -31,7 +31,7 @@ async function executeBundle(bundle, apiParams = {}) {
apiParams.startNote = bundle.note;
}
cls.namespace.set('sourceId', 'script');
cls.set('sourceId', 'script');
// last \r\n is necessary if script contains line comment on its last line
const script = "async function() {\r\n" + bundle.script + "\r\n}";
@ -187,4 +187,4 @@ module.exports = {
executeNoteNoException,
executeScript,
getScriptBundleForFrontend
};
};


@ -6,6 +6,7 @@ const optionService = require('./options');
const syncOptions = require('./sync_options');
const request = require('./request');
const appInfo = require('./app_info');
const utils = require('./utils');
async function hasSyncServerSchemaAndSeed() {
const response = await requestToSyncServer('GET', '/api/setup/status');
@ -43,13 +44,15 @@ async function sendSeedToSyncServer() {
}
async function requestToSyncServer(method, path, body = null) {
return await request.exec({
const timeout = await syncOptions.getSyncTimeout();
return utils.timeLimit(request.exec({
method,
url: await syncOptions.getSyncServerHost() + path,
body,
proxy: await syncOptions.getSyncProxy(),
timeout: await syncOptions.getSyncTimeout()
});
timeout: timeout
}), timeout);
}
async function setupSyncFromSyncServer(syncServerHost, syncProxy, username, password) {
@ -115,4 +118,4 @@ module.exports = {
sendSeedToSyncServer,
setupSyncFromSyncServer,
getSyncSeedOptions
};
};


@ -9,7 +9,7 @@ function setDbConnection(connection) {
dbConnection = connection;
}
[`exit`, `SIGINT`, `SIGUSR1`, `SIGUSR2`, `uncaughtException`, `SIGTERM`].forEach(eventType => {
[`exit`, `SIGINT`, `SIGUSR1`, `SIGUSR2`, `SIGTERM`].forEach(eventType => {
process.on(eventType, () => {
if (dbConnection) {
// closing connection is especially important to fold -wal file into the main DB file
@ -33,7 +33,7 @@ async function insert(tableName, rec, replace = false) {
const res = await execute(query, Object.values(rec));
return res.lastID;
return res.lastInsertRowid;
}
async function replace(tableName, rec) {
@ -49,34 +49,46 @@ async function upsert(tableName, primaryKey, rec) {
const columns = keys.join(", ");
let i = 0;
const questionMarks = keys.map(colName => "@" + colName).join(", ");
const questionMarks = keys.map(p => ":" + i++).join(", ");
i = 0;
const updateMarks = keys.map(key => `${key} = :${i++}`).join(", ");
const updateMarks = keys.map(colName => `${colName} = @${colName}`).join(", ");
const query = `INSERT INTO ${tableName} (${columns}) VALUES (${questionMarks})
ON CONFLICT (${primaryKey}) DO UPDATE SET ${updateMarks}`;
await execute(query, Object.values(rec));
for (const idx in rec) {
if (rec[idx] === true || rec[idx] === false) {
rec[idx] = rec[idx] ? 1 : 0;
}
}
await execute(query, rec);
}
async function beginTransaction() {
return await execute("BEGIN");
const statementCache = {};
function stmt(sql) {
if (!(sql in statementCache)) {
statementCache[sql] = dbConnection.prepare(sql);
}
return statementCache[sql];
}
async function commit() {
return await execute("COMMIT");
function beginTransaction() {
return stmt("BEGIN").run();
}
async function rollback() {
return await execute("ROLLBACK");
function commit() {
return stmt("COMMIT").run();
}
function rollback() {
return stmt("ROLLBACK").run();
}
async function getRow(query, params = []) {
return await wrap(async db => db.get(query, ...params), query);
return wrap(() => stmt(query).get(params), query);
}
async function getRowOrNull(query, params = []) {
@ -105,18 +117,25 @@ async function getManyRows(query, params) {
const curParams = params.slice(0, Math.min(params.length, PARAM_LIMIT));
params = params.slice(curParams.length);
const curParamsObj = {};
let j = 1;
for (const param of curParams) {
curParamsObj['param' + j++] = param;
}
let i = 1;
const questionMarks = curParams.map(() => "?" + i++).join(",");
const questionMarks = curParams.map(() => ":param" + i++).join(",");
const curQuery = query.replace(/\?\?\?/g, questionMarks);
results = results.concat(await getRows(curQuery, curParams));
results = results.concat(await getRows(curQuery, curParamsObj));
}
return results;
}
async function getRows(query, params = []) {
return await wrap(async db => db.all(query, ...params), query);
return wrap(() => stmt(query).all(params), query);
}
async function getMap(query, params = []) {
@ -150,23 +169,29 @@ async function getColumn(query, params = []) {
}
async function execute(query, params = []) {
return await wrap(async db => db.run(query, ...params), query);
await startTransactionIfNecessary();
return wrap(() => stmt(query).run(params), query);
}
async function executeNoWrap(query, params = []) {
await dbConnection.run(query, ...params);
async function executeWithoutTransaction(query, params = []) {
await dbConnection.run(query, params);
}
async function executeMany(query, params) {
await startTransactionIfNecessary();
// essentially just alias
await getManyRows(query, params);
}
async function executeScript(query) {
return await wrap(async db => db.exec(query), query);
await startTransactionIfNecessary();
return wrap(() => stmt.run(query), query);
}
async function wrap(func, query) {
function wrap(func, query) {
if (!dbConnection) {
throw new Error("DB connection not initialized yet");
}
@ -176,7 +201,7 @@ async function wrap(func, query) {
try {
const startTimestamp = Date.now();
const result = await func(dbConnection);
const result = func(dbConnection);
const milliseconds = Date.now() - startTimestamp;
if (milliseconds >= 300) {
@ -199,61 +224,68 @@ async function wrap(func, query) {
}
}
// true if transaction is active globally.
// cls.namespace.get('isTransactional') OTOH indicates active transaction in active CLS
let transactionActive = false;
// resolves when current transaction ends with either COMMIT or ROLLBACK
let transactionPromise = null;
let transactionPromiseResolve = null;
async function transactional(func) {
if (cls.namespace.get('isInTransaction')) {
return await func();
async function startTransactionIfNecessary() {
if (!cls.get('isTransactional')
|| cls.get('isInTransaction')) {
return;
}
while (transactionActive) {
await transactionPromise;
}
let ret = null;
const thisError = new Error(); // to capture correct stack trace in case of exception
// first set semaphore (atomic operation and only then start transaction
transactionActive = true;
transactionPromise = new Promise(async (resolve, reject) => {
try {
await beginTransaction();
transactionPromise = new Promise(res => transactionPromiseResolve = res);
cls.set('isInTransaction', true);
cls.namespace.set('isInTransaction', true);
await beginTransaction();
}
ret = await func();
async function transactional(func) {
// if the CLS is already transactional then the whole transaction is handled by higher level transactional() call
if (cls.get('isTransactional')) {
return await func();
}
cls.set('isTransactional', true); // this signals that transaction will be needed if there's a write operation
try {
const ret = await func();
if (cls.get('isInTransaction')) {
await commit();
// note that sync rows sent from this action will be sent again by scheduled periodic ping
require('./ws.js').sendPingToAllClients();
transactionActive = false;
resolve();
setTimeout(() => require('./ws').sendPingToAllClients(), 50);
}
catch (e) {
if (transactionActive) {
log.error("Error executing transaction, executing rollback. Inner stack: " + e.stack + "\nOutside stack: " + thisError.stack);
await rollback();
transactionActive = false;
}
reject(e);
}
finally {
cls.namespace.set('isInTransaction', false);
}
});
if (transactionActive) {
await transactionPromise;
return ret;
}
catch (e) {
if (cls.get('isInTransaction')) {
await rollback();
}
return ret;
throw e;
}
finally {
cls.namespace.set('isTransactional', false);
if (cls.namespace.get('isInTransaction')) {
transactionActive = false;
cls.namespace.set('isInTransaction', false);
// resolving even for rollback since this is just semaphore for allowing another write transaction to proceed
transactionPromiseResolve();
}
}
}
module.exports = {
@ -268,7 +300,7 @@ module.exports = {
getMap,
getColumn,
execute,
executeNoWrap,
executeWithoutTransaction,
executeMany,
executeScript,
transactional,


@ -1,8 +1,6 @@
const log = require('./log');
const dataDir = require('./data_dir');
const fs = require('fs');
const sqlite = require('sqlite');
const sqlite3 = require('sqlite3');
const resourceDir = require('./resource_dir');
const appInfo = require('./app_info');
const sql = require('./sql');
@ -12,28 +10,14 @@ const optionService = require('./options');
const port = require('./port');
const Option = require('../entities/option');
const TaskContext = require('./task_context.js');
const Database = require('better-sqlite3');
const dbConnection = new Promise(async (resolve, reject) => {
const db = await sqlite.open({
filename: dataDir.DOCUMENT_PATH,
driver: sqlite3.Database
});
const dbConnection = new Database(dataDir.DOCUMENT_PATH);
dbConnection.pragma('journal_mode = WAL');
db.run('PRAGMA journal_mode = WAL;');
sql.setDbConnection(dbConnection);
sql.setDbConnection(db);
resolve();
});
let dbReadyResolve = null;
const dbReady = new Promise(async (resolve, reject) => {
dbReadyResolve = resolve;
await dbConnection;
initDbConnection();
});
const dbReady = initDbConnection();
async function schemaExists() {
const tableResults = await sql.getRows("SELECT name FROM sqlite_master WHERE type='table' AND name='options'");
@ -78,7 +62,6 @@ async function initDbConnection() {
await require('./options_init').initStartupOptions();
log.info("DB ready.");
dbReadyResolve();
});
}
@ -189,7 +172,6 @@ dbReady.then(async () => {
module.exports = {
dbReady,
dbConnection,
schemaExists,
isDbInitialized,
initDbConnection,
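
The connection setup above replaces the async sqlite/sqlite3 open with better-sqlite3's synchronous constructor plus a WAL pragma. A minimal sketch, not the commit's code (assumes better-sqlite3 is installed; the file path is illustrative):

const Database = require('better-sqlite3');

const dbConnection = new Database('/tmp/example-document.db'); // opens (or creates) the file synchronously
dbConnection.pragma('journal_mode = WAL');                     // same pragma as above

console.log(dbConnection.pragma('journal_mode', {simple: true})); // 'wal'

// closing folds the -wal file back into the main DB file, which is why the
// process signal handlers above call close() on exit
process.on('exit', () => dbConnection.close());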


@ -70,7 +70,7 @@ async function sync() {
};
}
else {
log.info("sync failed: " + e.message + e.stack);
log.info("sync failed: " + e.message + "\nstack: " + e.stack);
return {
success: false,
@ -97,7 +97,6 @@ async function doLogin() {
const hash = utils.hmac(documentSecret, timestamp);
const syncContext = { cookieJar: {} };
const resp = await syncRequest(syncContext, 'POST', '/api/login/sync', {
timestamp: timestamp,
syncVersion: appInfo.syncVersion,
@ -259,14 +258,18 @@ async function checkContentHash(syncContext) {
}
async function syncRequest(syncContext, method, requestPath, body) {
return await request.exec({
const timeout = await syncOptions.getSyncTimeout();
const opts = {
method,
url: await syncOptions.getSyncServerHost() + requestPath,
cookieJar: syncContext.cookieJar,
timeout: await syncOptions.getSyncTimeout(),
timeout: timeout,
body,
proxy: proxyToggle ? await syncOptions.getSyncProxy() : null
});
};
return await utils.timeLimit(request.exec(opts), timeout);
}
const primaryKeys = {
@ -369,7 +372,7 @@ sqlInit.dbReady.then(async () => {
setInterval(cls.wrap(sync), 60000);
// kickoff initial sync immediately
setTimeout(cls.wrap(sync), 1000);
setTimeout(cls.wrap(sync), 3000);
setInterval(cls.wrap(updatePushStats), 1000);
});
@ -380,4 +383,4 @@ module.exports = {
getSyncRecords,
stats,
getMaxSyncId
};
};


@ -206,6 +206,14 @@ function formatDownloadTitle(filename, type, mime) {
}
}
if (mime === 'application/octet-stream') {
// we didn't find any good guess for this one, so it's better to just return
// the current name without a fake extension. It's possible that the title still preserves the correct
// extension anyway
return filename;
}
return filename + '.' + extensions[0];
}
}
@ -233,6 +241,24 @@ function getNoteTitle(filePath, replaceUnderscoresWithSpaces, noteMeta) {
}
}
function timeLimit(promise, limitMs) {
return new Promise((res, rej) => {
let resolved = false;
promise.then(result => {
resolved = true;
res(result);
});
setTimeout(() => {
if (!resolved) {
rej(new Error('Process exceeded time limit ' + limitMs));
}
}, limitMs);
});
}
module.exports = {
randomSecureToken,
randomString,
@ -261,7 +287,8 @@ module.exports = {
isStringNote,
quoteRegex,
replaceAll,
formatDownloadTitle,
getNoteTitle,
removeTextFileExtension,
formatDownloadTitle,
timeLimit
};


@ -36,7 +36,7 @@ function init(httpServer, sessionParser) {
const message = JSON.parse(messageJson);
if (message.type === 'log-error') {
log.error('JS Error: ' + message.error);
log.info('JS Error: ' + message.error + '\r\nStack: ' + message.stack);
}
else if (message.type === 'ping') {
lastAcceptedSyncIds[ws.id] = message.lastSyncId;
@ -141,4 +141,4 @@ module.exports = {
syncPullInProgress,
syncPullFinished,
sendPingToAllClients
};
};