Merge pull request #28 from TriliumNext/feature/typescript_backend_4

Convert backend to TypeScript (50% -> 64%)
This commit is contained in:
Elian Doran 2024-04-15 21:00:38 +03:00 committed by GitHub
commit 6ac3c172b1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
59 changed files with 971 additions and 638 deletions

120
package-lock.json generated
View File

@ -88,16 +88,22 @@
"trilium": "src/www.js"
},
"devDependencies": {
"@types/archiver": "^6.0.2",
"@types/better-sqlite3": "^7.6.9",
"@types/cls-hooked": "^4.3.8",
"@types/escape-html": "^1.0.4",
"@types/express": "^4.17.21",
"@types/html": "^1.0.4",
"@types/ini": "^4.1.0",
"@types/jsdom": "^21.1.6",
"@types/mime-types": "^2.1.4",
"@types/node": "^20.11.19",
"@types/sanitize-html": "^2.11.0",
"@types/sax": "^1.2.7",
"@types/stream-throttle": "^0.1.4",
"@types/turndown": "^5.0.4",
"@types/ws": "^8.5.10",
"@types/xml2js": "^0.4.14",
"cross-env": "7.0.3",
"electron": "25.9.8",
"electron-builder": "24.13.3",
@ -1169,6 +1175,15 @@
"resolved": "https://registry.npmjs.org/@tweenjs/tween.js/-/tween.js-21.0.0.tgz",
"integrity": "sha512-qVfOiFh0U8ZSkLgA6tf7kj2MciqRbSCWaJZRwftVO7UbtVDNsZAXpWXqvCDtIefvjC83UJB+vHTDOGm5ibXjEA=="
},
"node_modules/@types/archiver": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/@types/archiver/-/archiver-6.0.2.tgz",
"integrity": "sha512-KmROQqbQzKGuaAbmK+ZcytkJ51+YqDa7NmbXjmtC5YBLSyQYo21YaUnQ3HbaPFKL1ooo6RQ6OPYPIDyxfpDDXw==",
"dev": true,
"dependencies": {
"@types/readdir-glob": "*"
}
},
"node_modules/@types/better-sqlite3": {
"version": "7.6.9",
"resolved": "https://registry.npmjs.org/@types/better-sqlite3/-/better-sqlite3-7.6.9.tgz",
@ -1318,6 +1333,12 @@
"@types/node": "*"
}
},
"node_modules/@types/html": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@types/html/-/html-1.0.4.tgz",
"integrity": "sha512-Wb1ymSAftCLxhc3D6vS0Ike/0xg7W6c+DQxAkerU6pD7C8CMzTYwvrwnlcrTfsVO/nMelB9KOKIT7+N5lOeQUg==",
"dev": true
},
"node_modules/@types/http-cache-semantics": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz",
@ -1468,6 +1489,15 @@
"integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==",
"dev": true
},
"node_modules/@types/readdir-glob": {
"version": "1.1.5",
"resolved": "https://registry.npmjs.org/@types/readdir-glob/-/readdir-glob-1.1.5.tgz",
"integrity": "sha512-raiuEPUYqXu+nvtY2Pe8s8FEmZ3x5yAH4VkLdihcPdalvsHltomrRC9BzuStrJ9yk06470hS0Crw0f1pXqD+Hg==",
"dev": true,
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/responselike": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.0.tgz",
@ -1559,6 +1589,15 @@
"entities": "^4.4.0"
}
},
"node_modules/@types/sax": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/@types/sax/-/sax-1.2.7.tgz",
"integrity": "sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==",
"dev": true,
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/send": {
"version": "0.17.4",
"resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz",
@ -1580,12 +1619,27 @@
"@types/node": "*"
}
},
"node_modules/@types/stream-throttle": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/@types/stream-throttle/-/stream-throttle-0.1.4.tgz",
"integrity": "sha512-VxXIHGjVuK8tYsVm60rIQMmF/0xguCeen5OmK5S4Y6K64A+z+y4/GI6anRnVzaUZaJB9Ah9IfbDcO0o1gZCc/w==",
"dev": true,
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/tough-cookie": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz",
"integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==",
"dev": true
},
"node_modules/@types/turndown": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/@types/turndown/-/turndown-5.0.4.tgz",
"integrity": "sha512-28GI33lCCkU4SGH1GvjDhFgOVr+Tym4PXGBIU1buJUa6xQolniPArtUT+kv42RR2N9MsMLInkr904Aq+ESHBJg==",
"dev": true
},
"node_modules/@types/unist": {
"version": "2.0.10",
"resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz",
@ -1607,6 +1661,15 @@
"@types/node": "*"
}
},
"node_modules/@types/xml2js": {
"version": "0.4.14",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz",
"integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==",
"dev": true,
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/yauzl": {
"version": "2.9.2",
"resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.9.2.tgz",
@ -14084,6 +14147,15 @@
"resolved": "https://registry.npmjs.org/@tweenjs/tween.js/-/tween.js-21.0.0.tgz",
"integrity": "sha512-qVfOiFh0U8ZSkLgA6tf7kj2MciqRbSCWaJZRwftVO7UbtVDNsZAXpWXqvCDtIefvjC83UJB+vHTDOGm5ibXjEA=="
},
"@types/archiver": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/@types/archiver/-/archiver-6.0.2.tgz",
"integrity": "sha512-KmROQqbQzKGuaAbmK+ZcytkJ51+YqDa7NmbXjmtC5YBLSyQYo21YaUnQ3HbaPFKL1ooo6RQ6OPYPIDyxfpDDXw==",
"dev": true,
"requires": {
"@types/readdir-glob": "*"
}
},
"@types/better-sqlite3": {
"version": "7.6.9",
"resolved": "https://registry.npmjs.org/@types/better-sqlite3/-/better-sqlite3-7.6.9.tgz",
@ -14233,6 +14305,12 @@
"@types/node": "*"
}
},
"@types/html": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@types/html/-/html-1.0.4.tgz",
"integrity": "sha512-Wb1ymSAftCLxhc3D6vS0Ike/0xg7W6c+DQxAkerU6pD7C8CMzTYwvrwnlcrTfsVO/nMelB9KOKIT7+N5lOeQUg==",
"dev": true
},
"@types/http-cache-semantics": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz",
@ -14376,6 +14454,15 @@
"integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==",
"dev": true
},
"@types/readdir-glob": {
"version": "1.1.5",
"resolved": "https://registry.npmjs.org/@types/readdir-glob/-/readdir-glob-1.1.5.tgz",
"integrity": "sha512-raiuEPUYqXu+nvtY2Pe8s8FEmZ3x5yAH4VkLdihcPdalvsHltomrRC9BzuStrJ9yk06470hS0Crw0f1pXqD+Hg==",
"dev": true,
"requires": {
"@types/node": "*"
}
},
"@types/responselike": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.0.tgz",
@ -14444,6 +14531,15 @@
}
}
},
"@types/sax": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/@types/sax/-/sax-1.2.7.tgz",
"integrity": "sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==",
"dev": true,
"requires": {
"@types/node": "*"
}
},
"@types/send": {
"version": "0.17.4",
"resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz",
@ -14465,12 +14561,27 @@
"@types/node": "*"
}
},
"@types/stream-throttle": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/@types/stream-throttle/-/stream-throttle-0.1.4.tgz",
"integrity": "sha512-VxXIHGjVuK8tYsVm60rIQMmF/0xguCeen5OmK5S4Y6K64A+z+y4/GI6anRnVzaUZaJB9Ah9IfbDcO0o1gZCc/w==",
"dev": true,
"requires": {
"@types/node": "*"
}
},
"@types/tough-cookie": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz",
"integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==",
"dev": true
},
"@types/turndown": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/@types/turndown/-/turndown-5.0.4.tgz",
"integrity": "sha512-28GI33lCCkU4SGH1GvjDhFgOVr+Tym4PXGBIU1buJUa6xQolniPArtUT+kv42RR2N9MsMLInkr904Aq+ESHBJg==",
"dev": true
},
"@types/unist": {
"version": "2.0.10",
"resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz",
@ -14492,6 +14603,15 @@
"@types/node": "*"
}
},
"@types/xml2js": {
"version": "0.4.14",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz",
"integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==",
"dev": true,
"requires": {
"@types/node": "*"
}
},
"@types/yauzl": {
"version": "2.9.2",
"resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.9.2.tgz",

View File

@ -109,16 +109,22 @@
"yauzl": "3.1.2"
},
"devDependencies": {
"@types/archiver": "^6.0.2",
"@types/better-sqlite3": "^7.6.9",
"@types/cls-hooked": "^4.3.8",
"@types/escape-html": "^1.0.4",
"@types/express": "^4.17.21",
"@types/html": "^1.0.4",
"@types/ini": "^4.1.0",
"@types/jsdom": "^21.1.6",
"@types/mime-types": "^2.1.4",
"@types/node": "^20.11.19",
"@types/sanitize-html": "^2.11.0",
"@types/sax": "^1.2.7",
"@types/stream-throttle": "^0.1.4",
"@types/turndown": "^5.0.4",
"@types/ws": "^8.5.10",
"@types/xml2js": "^0.4.14",
"cross-env": "7.0.3",
"electron": "25.9.8",
"electron-builder": "24.13.3",

View File

@ -1,5 +1,5 @@
const anonymizationService = require('./services/anonymization');
const sqlInit = require('./services/sql_init');
import anonymizationService = require('./services/anonymization');
import sqlInit = require('./services/sql_init');
require('./becca/entity_constructor');
sqlInit.dbReady.then(async () => {
@ -16,7 +16,7 @@ sqlInit.dbReady.then(async () => {
console.log("Anonymization failed.");
}
}
catch (e) {
catch (e: any) {
console.error(e.message, e.stack);
}

View File

@ -26,10 +26,10 @@ app.use(helmet({
crossOriginEmbedderPolicy: false
}));
app.use(express.text({limit: '500mb'}));
app.use(express.json({limit: '500mb'}));
app.use(express.raw({limit: '500mb'}));
app.use(express.urlencoded({extended: false}));
app.use(express.text({ limit: '500mb' }));
app.use(express.json({ limit: '500mb' }));
app.use(express.raw({ limit: '500mb' }));
app.use(express.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public/root')));
app.use(`/manifest.webmanifest`, express.static(path.join(__dirname, 'public/manifest.webmanifest')));
@ -49,7 +49,7 @@ require('./services/sync');
require('./services/backup');
// trigger consistency checks timer
require('./services/consistency_checks.js');
require('./services/consistency_checks');
require('./services/scheduler.js');

View File

@ -25,7 +25,7 @@ interface ContentOpts {
*/
abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
protected utcDateModified?: string;
utcDateModified?: string;
protected dateCreated?: string;
protected dateModified?: string;

View File

@ -62,7 +62,7 @@ export interface BlobRow {
utcDateModified: string;
}
export type AttributeType = "label" | "relation";
export type AttributeType = "label" | "relation" | "label-definition" | "relation-definition";
export interface AttributeRow {
attributeId?: string;

View File

@ -7,8 +7,8 @@ const TaskContext = require('../services/task_context');
const v = require('./validators.js');
const searchService = require('../services/search/services/search');
const SearchContext = require('../services/search/search_context');
const zipExportService = require('../services/export/zip.js');
const zipImportService = require('../services/import/zip.js');
const zipExportService = require('../services/export/zip');
const zipImportService = require('../services/import/zip');
function register(router) {
eu.route(router, 'get', '/etapi/notes', (req, res, next) => {

View File

@ -1,4 +1,4 @@
const specialNotesService = require('../services/special_notes.js');
const specialNotesService = require('../services/special_notes');
const dateNotesService = require('../services/date_notes');
const eu = require('./etapi_utils');
const mappers = require('./mappers.js');
@ -17,7 +17,7 @@ function isValidDate(date) {
function register(router) {
eu.route(router, 'get', '/etapi/inbox/:date', (req, res, next) => {
const {date} = req.params;
const { date } = req.params;
if (!isValidDate(date)) {
throw getDateInvalidError(date);
@ -28,7 +28,7 @@ function register(router) {
});
eu.route(router, 'get', '/etapi/calendar/days/:date', (req, res, next) => {
const {date} = req.params;
const { date } = req.params;
if (!isValidDate(date)) {
throw getDateInvalidError(date);
@ -39,7 +39,7 @@ function register(router) {
});
eu.route(router, 'get', '/etapi/calendar/weeks/:date', (req, res, next) => {
const {date} = req.params;
const { date } = req.params;
if (!isValidDate(date)) {
throw getDateInvalidError(date);
@ -50,7 +50,7 @@ function register(router) {
});
eu.route(router, 'get', '/etapi/calendar/months/:month', (req, res, next) => {
const {month} = req.params;
const { month } = req.params;
if (!/[0-9]{4}-[0-9]{2}/.test(month)) {
throw getMonthInvalidError(month);
@ -61,7 +61,7 @@ function register(router) {
});
eu.route(router, 'get', '/etapi/calendar/years/:year', (req, res, next) => {
const {year} = req.params;
const { year } = req.params;
if (!/[0-9]{4}/.test(year)) {
throw getYearInvalidError(year);

View File

@ -1,7 +1,7 @@
const becca = require('../../becca/becca');
const blobService = require('../../services/blob');
const ValidationError = require('../../errors/validation_error');
const imageService = require("../../services/image.js");
const imageService = require("../../services/image");
function getAttachmentBlob(req) {
const preview = req.query.preview === 'true';

View File

@ -5,7 +5,7 @@ const cloneService = require('../../services/cloning');
const noteService = require('../../services/notes');
const dateNoteService = require('../../services/date_notes');
const dateUtils = require('../../services/date_utils');
const imageService = require('../../services/image.js');
const imageService = require('../../services/image');
const appInfo = require('../../services/app_info');
const ws = require('../../services/ws');
const log = require('../../services/log');

View File

@ -4,7 +4,7 @@ const sql = require('../../services/sql');
const log = require('../../services/log');
const backupService = require('../../services/backup');
const anonymizationService = require('../../services/anonymization');
const consistencyChecksService = require('../../services/consistency_checks.js');
const consistencyChecksService = require('../../services/consistency_checks');
function getExistingBackups() {
return backupService.getExistingBackups();

View File

@ -1,8 +1,8 @@
"use strict";
const zipExportService = require('../../services/export/zip.js');
const singleExportService = require('../../services/export/single.js');
const opmlExportService = require('../../services/export/opml.js');
const zipExportService = require('../../services/export/zip');
const singleExportService = require('../../services/export/single');
const opmlExportService = require('../../services/export/opml');
const becca = require('../../becca/becca');
const TaskContext = require('../../services/task_context');
const log = require('../../services/log');

View File

@ -1,6 +1,6 @@
"use strict";
const imageService = require('../../services/image.js');
const imageService = require('../../services/image');
const becca = require('../../becca/becca');
const RESOURCE_DIR = require('../../services/resource_dir').RESOURCE_DIR;
const fs = require('fs');

View File

@ -1,9 +1,9 @@
"use strict";
const enexImportService = require('../../services/import/enex.js');
const opmlImportService = require('../../services/import/opml.js');
const zipImportService = require('../../services/import/zip.js');
const singleImportService = require('../../services/import/single.js');
const enexImportService = require('../../services/import/enex');
const opmlImportService = require('../../services/import/opml');
const zipImportService = require('../../services/import/zip');
const singleImportService = require('../../services/import/single');
const cls = require('../../services/cls');
const path = require('path');
const becca = require('../../becca/becca');
@ -13,8 +13,8 @@ const TaskContext = require('../../services/task_context');
const ValidationError = require('../../errors/validation_error');
async function importNotesToBranch(req) {
const {parentNoteId} = req.params;
const {taskId, last} = req.body;
const { parentNoteId } = req.params;
const { taskId, last } = req.body;
const options = {
safeImport: req.body.safeImport !== 'false',
@ -81,8 +81,8 @@ async function importNotesToBranch(req) {
}
async function importAttachmentsToNote(req) {
const {parentNoteId} = req.params;
const {taskId, last} = req.body;
const { parentNoteId } = req.params;
const { taskId, last } = req.body;
const options = {
shrinkImages: req.body.shrinkImages !== 'false',

View File

@ -1,5 +1,5 @@
const becca = require('../../becca/becca');
const markdownService = require('../../services/import/markdown.js');
const markdownService = require('../../services/import/markdown');
function getIconUsage() {
const iconClassToCountMap = {};

View File

@ -1,10 +1,10 @@
"use strict";
const imageType = require('image-type');
const imageService = require('../../services/image.js');
const imageService = require('../../services/image');
const noteService = require('../../services/notes');
const {sanitizeAttributeName} = require('../../services/sanitize_attribute_name');
const specialNotesService = require('../../services/special_notes.js');
const { sanitizeAttributeName } = require('../../services/sanitize_attribute_name');
const specialNotesService = require('../../services/special_notes');
function uploadImage(req) {
const file = req.file;
@ -17,14 +17,14 @@ function uploadImage(req) {
const parentNote = specialNotesService.getInboxNote(req.headers['x-local-date']);
const {note, noteId} = imageService.saveImage(parentNote.noteId, file.buffer, originalName, true);
const { note, noteId } = imageService.saveImage(parentNote.noteId, file.buffer, originalName, true);
const labelsStr = req.headers['x-labels'];
if (labelsStr?.trim()) {
const labels = JSON.parse(labelsStr);
for (const {name, value} of labels) {
for (const { name, value } of labels) {
note.setLabel(sanitizeAttributeName(name), value);
}
}
@ -39,7 +39,7 @@ function uploadImage(req) {
function saveNote(req) {
const parentNote = specialNotesService.getInboxNote(req.headers['x-local-date']);
const {note, branch} = noteService.createNewNote({
const { note, branch } = noteService.createNewNote({
parentNoteId: parentNote.noteId,
title: req.body.title,
content: req.body.content,
@ -49,7 +49,7 @@ function saveNote(req) {
});
if (req.body.labels) {
for (const {name, value} of req.body.labels) {
for (const { name, value } of req.body.labels) {
note.setLabel(sanitizeAttributeName(name), value);
}
}

View File

@ -1,7 +1,7 @@
"use strict";
const sqlInit = require('../../services/sql_init');
const setupService = require('../../services/setup.js');
const setupService = require('../../services/setup');
const log = require('../../services/log');
const appInfo = require('../../services/app_info');
@ -24,7 +24,7 @@ function setupSyncFromServer(req) {
}
function saveSyncSeed(req) {
const {options, syncVersion} = req.body;
const { options, syncVersion } = req.body;
if (appInfo.syncVersion !== syncVersion) {
const message = `Could not setup sync since local sync protocol version is ${appInfo.syncVersion} while remote is ${syncVersion}. To fix this issue, use same Trilium version on all instances.`;

View File

@ -3,7 +3,7 @@
const dateNoteService = require('../../services/date_notes');
const sql = require('../../services/sql');
const cls = require('../../services/cls');
const specialNotesService = require('../../services/special_notes.js');
const specialNotesService = require('../../services/special_notes');
const becca = require('../../becca/becca');
function getInboxNote(req) {

View File

@ -132,7 +132,7 @@ function getChanged(req) {
const partialRequests = {};
function update(req) {
let {body} = req;
let { body } = req;
const pageCount = parseInt(req.get('pageCount'));
const pageIndex = parseInt(req.get('pageIndex'));
@ -164,7 +164,7 @@ function update(req) {
}
}
const {entities, instanceId} = body;
const { entities, instanceId } = body;
sql.transactional(() => syncUpdateService.updateEntities(entities, instanceId));
}
@ -193,7 +193,7 @@ function queueSector(req) {
}
function checkEntityChanges() {
require('../../services/consistency_checks.js').runEntityChangesChecks();
require('../../services/consistency_checks').runEntityChangesChecks();
}
module.exports = {

View File

@ -41,14 +41,14 @@ const importRoute = require('./api/import.js');
const setupApiRoute = require('./api/setup.js');
const sqlRoute = require('./api/sql');
const databaseRoute = require('./api/database.js');
const imageRoute = require('./api/image.js');
const imageRoute = require('./api/image');
const attributesRoute = require('./api/attributes');
const scriptRoute = require('./api/script.js');
const senderRoute = require('./api/sender.js');
const filesRoute = require('./api/files.js');
const searchRoute = require('./api/search');
const bulkActionRoute = require('./api/bulk_action.js');
const specialNotesRoute = require('./api/special_notes.js');
const specialNotesRoute = require('./api/special_notes');
const noteMapRoute = require('./api/note_map.js');
const clipperRoute = require('./api/clipper.js');
const similarNotesRoute = require('./api/similar_notes.js');
@ -67,7 +67,7 @@ const etapiAttachmentRoutes = require('../etapi/attachments.js');
const etapiAttributeRoutes = require('../etapi/attributes');
const etapiBranchRoutes = require('../etapi/branches.js');
const etapiNoteRoutes = require('../etapi/notes.js');
const etapiSpecialNoteRoutes = require('../etapi/special_notes.js');
const etapiSpecialNoteRoutes = require('../etapi/special_notes');
const etapiSpecRoute = require('../etapi/spec.js');
const etapiBackupRoute = require('../etapi/backup');
@ -230,7 +230,7 @@ function register(app) {
apiRoute(GET, '/api/app-info', appInfoRoute.getAppInfo);
// docker health check
route(GET, '/api/health-check', [], () => ({"status": "ok"}), apiResultHandler);
route(GET, '/api/health-check', [], () => ({ "status": "ok" }), apiResultHandler);
// group of the services below are meant to be executed from the outside
route(GET, '/api/setup/status', [], setupApiRoute.getStatus, apiResultHandler);

View File

@ -1,7 +1,7 @@
"use strict";
const sqlInit = require('../services/sql_init');
const setupService = require('../services/setup.js');
const setupService = require('../services/setup');
const utils = require('../services/utils');
const assetPath = require('../services/asset_path');
const appPath = require('../services/app_path');
@ -10,7 +10,7 @@ function setupPage(req, res) {
if (sqlInit.isDbInitialized()) {
if (utils.isElectron()) {
const windowService = require('../services/window');
const {app} = require('electron');
const { app } = require('electron');
windowService.createMainWindow(app);
windowService.closeSetupWindow();
}

View File

@ -90,7 +90,7 @@ function getExistingAnonymizedDatabases() {
}));
}
module.exports = {
export = {
getFullAnonymizationScript,
createAnonymizedCopy,
getExistingAnonymizedDatabases

View File

@ -0,0 +1,17 @@
import { OptionRow } from "../becca/entities/rows";
/**
* Response for /api/setup/status.
*/
export interface SetupStatusResponse {
syncVersion: number;
schemaExists: boolean;
}
/**
* Response for /api/setup/sync-seed.
*/
export interface SetupSyncSeedResponse {
syncVersion: number;
options: OptionRow[]
}

View File

@ -15,10 +15,10 @@ const searchService = require('./search/services/search');
const SearchContext = require('./search/search_context');
const becca = require('../becca/becca');
const ws = require('./ws');
const SpacedUpdate = require('./spaced_update.js');
const specialNotesService = require('./special_notes.js');
const SpacedUpdate = require('./spaced_update');
const specialNotesService = require('./special_notes');
const branchService = require('./branches');
const exportService = require('./export/zip.js');
const exportService = require('./export/zip');
const syncMutex = require('./sync_mutex');
const backupService = require('./backup');
const optionsService = require('./options');
@ -320,7 +320,7 @@ function BackendScriptApi(currentNote, apiParams) {
* @param {string} [extraOptions.attributes.value] - attribute value
* @returns {{note: BNote, branch: BBranch}} object contains newly created entities note and branch
*/
this.createNote = (parentNoteId, title, content = "", extraOptions= {}) => {
this.createNote = (parentNoteId, title, content = "", extraOptions = {}) => {
extraOptions.parentNoteId = parentNoteId;
extraOptions.title = title;
@ -340,7 +340,7 @@ function BackendScriptApi(currentNote, apiParams) {
}
return sql.transactional(() => {
const {note, branch} = noteService.createNewNote(extraOptions);
const { note, branch } = noteService.createNewNote(extraOptions);
for (const attr of extraOptions.attributes || []) {
attributeService.createAttribute({
@ -352,7 +352,7 @@ function BackendScriptApi(currentNote, apiParams) {
});
}
return {note, branch};
return { note, branch };
});
};
@ -369,7 +369,7 @@ function BackendScriptApi(currentNote, apiParams) {
this.log = message => {
log.info(message);
const {noteId} = this.startNote;
const { noteId } = this.startNote;
this.logMessages[noteId] = this.logMessages[noteId] || [];
this.logSpacedUpdates[noteId] = this.logSpacedUpdates[noteId] || new SpacedUpdate(() => {
@ -600,7 +600,7 @@ function BackendScriptApi(currentNote, apiParams) {
launcherNote.removeLabel('iconClass');
}
return {note: launcherNote};
return { note: launcherNote };
};
/**

View File

@ -1,33 +1,41 @@
"use strict";
const sql = require('./sql');
const sqlInit = require('./sql_init');
const log = require('./log');
const ws = require('./ws');
const syncMutexService = require('./sync_mutex');
const cls = require('./cls');
const entityChangesService = require('./entity_changes');
const optionsService = require('./options');
const BBranch = require('../becca/entities/bbranch');
const revisionService = require('./revisions');
const becca = require('../becca/becca');
const utils = require('../services/utils');
const eraseService = require('../services/erase');
const {sanitizeAttributeName} = require('./sanitize_attribute_name');
const noteTypes = require('../services/note_types').getNoteTypeNames();
import sql = require('./sql');
import sqlInit = require('./sql_init');
import log = require('./log');
import ws = require('./ws');
import syncMutexService = require('./sync_mutex');
import cls = require('./cls');
import entityChangesService = require('./entity_changes');
import optionsService = require('./options');
import BBranch = require('../becca/entities/bbranch');
import becca = require('../becca/becca');
import utils = require('../services/utils');
import eraseService = require('../services/erase');
import sanitizeAttributeName = require('./sanitize_attribute_name');
import noteTypesService = require('../services/note_types');
import { BranchRow } from '../becca/entities/rows';
import { EntityChange } from './entity_changes_interface';
const noteTypes = noteTypesService.getNoteTypeNames();
class ConsistencyChecks {
private autoFix: boolean;
private unrecoveredConsistencyErrors: boolean;
private fixedIssues: boolean;
private reloadNeeded: boolean;
/**
* @param autoFix - automatically fix all encountered problems. False is only for debugging during development (fail fast)
*/
constructor(autoFix) {
constructor(autoFix: boolean) {
this.autoFix = autoFix;
this.unrecoveredConsistencyErrors = false;
this.fixedIssues = false;
this.reloadNeeded = false;
}
findAndFixIssues(query, fixerCb) {
findAndFixIssues(query: string, fixerCb: (res: any) => void) {
const results = sql.getRows(query);
for (const res of results) {
@ -39,7 +47,7 @@ class ConsistencyChecks {
} else {
this.unrecoveredConsistencyErrors = true;
}
} catch (e) {
} catch (e: any) {
logError(`Fixer failed with ${e.message} ${e.stack}`);
this.unrecoveredConsistencyErrors = true;
}
@ -49,8 +57,8 @@ class ConsistencyChecks {
}
checkTreeCycles() {
const childToParents = {};
const rows = sql.getRows("SELECT noteId, parentNoteId FROM branches WHERE isDeleted = 0");
const childToParents: Record<string, string[]> = {};
const rows = sql.getRows<BranchRow>("SELECT noteId, parentNoteId FROM branches WHERE isDeleted = 0");
for (const row of rows) {
const childNoteId = row.noteId;
@ -61,7 +69,7 @@ class ConsistencyChecks {
}
/** @returns {boolean} true if cycle was found and we should try again */
const checkTreeCycle = (noteId, path) => {
const checkTreeCycle = (noteId: string, path: string[]) => {
if (noteId === 'root') {
return false;
}
@ -70,8 +78,10 @@ class ConsistencyChecks {
if (path.includes(parentNoteId)) {
if (this.autoFix) {
const branch = becca.getBranchFromChildAndParent(noteId, parentNoteId);
branch.markAsDeleted('cycle-autofix');
logFix(`Branch '${branch.branchId}' between child '${noteId}' and parent '${parentNoteId}' has been deleted since it was causing a tree cycle.`);
if (branch) {
branch.markAsDeleted('cycle-autofix');
logFix(`Branch '${branch.branchId}' between child '${noteId}' and parent '${parentNoteId}' has been deleted since it was causing a tree cycle.`);
}
return true;
}
@ -133,6 +143,9 @@ class ConsistencyChecks {
({branchId, noteId}) => {
if (this.autoFix) {
const branch = becca.getBranch(branchId);
if (!branch) {
return;
}
branch.markAsDeleted();
this.reloadNeeded = true;
@ -154,12 +167,21 @@ class ConsistencyChecks {
if (this.autoFix) {
// Delete the old branch and recreate it with root as parent.
const oldBranch = becca.getBranch(branchId);
if (!oldBranch) {
return;
}
const noteId = oldBranch.noteId;
oldBranch.markAsDeleted("missing-parent");
let message = `Branch '${branchId}' was missing parent note '${parentNoteId}', so it was deleted. `;
if (becca.getNote(noteId).getParentBranches().length === 0) {
const note = becca.getNote(noteId);
if (!note) {
return;
}
if (note.getParentBranches().length === 0) {
const newBranch = new BBranch({
parentNoteId: 'root',
noteId: noteId,
@ -188,6 +210,9 @@ class ConsistencyChecks {
({attributeId, noteId}) => {
if (this.autoFix) {
const attribute = becca.getAttribute(attributeId);
if (!attribute) {
return;
}
attribute.markAsDeleted();
this.reloadNeeded = true;
@ -208,6 +233,9 @@ class ConsistencyChecks {
({attributeId, noteId}) => {
if (this.autoFix) {
const attribute = becca.getAttribute(attributeId);
if (!attribute) {
return;
}
attribute.markAsDeleted();
this.reloadNeeded = true;
@ -230,6 +258,9 @@ class ConsistencyChecks {
({attachmentId, ownerId}) => {
if (this.autoFix) {
const attachment = becca.getAttachment(attachmentId);
if (!attachment) {
return;
}
attachment.markAsDeleted();
this.reloadNeeded = false;
@ -258,6 +289,7 @@ class ConsistencyChecks {
({branchId, noteId}) => {
if (this.autoFix) {
const branch = becca.getBranch(branchId);
if (!branch) return;
branch.markAsDeleted();
this.reloadNeeded = true;
@ -278,6 +310,9 @@ class ConsistencyChecks {
`, ({branchId, parentNoteId}) => {
if (this.autoFix) {
const branch = becca.getBranch(branchId);
if (!branch) {
return;
}
branch.markAsDeleted();
this.reloadNeeded = true;
@ -321,7 +356,7 @@ class ConsistencyChecks {
HAVING COUNT(1) > 1`,
({noteId, parentNoteId}) => {
if (this.autoFix) {
const branchIds = sql.getColumn(
const branchIds = sql.getColumn<string>(
`SELECT branchId
FROM branches
WHERE noteId = ?
@ -333,9 +368,17 @@ class ConsistencyChecks {
// it's not necessarily "original" branch, it's just the only one which will survive
const origBranch = branches[0];
if (!origBranch) {
logError(`Unable to find original branch.`);
return;
}
// delete all but the first branch
for (const branch of branches.slice(1)) {
if (!branch) {
continue;
}
branch.markAsDeleted();
logFix(`Removing branch '${branch.branchId}' since it's a parent-child duplicate of branch '${origBranch.branchId}'`);
@ -357,6 +400,7 @@ class ConsistencyChecks {
({attachmentId, noteId}) => {
if (this.autoFix) {
const attachment = becca.getAttachment(attachmentId);
if (!attachment) return;
attachment.markAsDeleted();
this.reloadNeeded = false;
@ -379,6 +423,7 @@ class ConsistencyChecks {
({noteId, type}) => {
if (this.autoFix) {
const note = becca.getNote(noteId);
if (!note) return;
note.type = 'file'; // file is a safe option to recover notes if the type is not known
note.save();
@ -404,6 +449,10 @@ class ConsistencyChecks {
const fakeDate = "2000-01-01 00:00:00Z";
const blankContent = getBlankContent(isProtected, type, mime);
if (!blankContent) {
logError(`Unable to recover note ${noteId} since it's content could not be retrieved (might be protected note).`);
return;
}
const blobId = utils.hashedBlobId(blankContent);
const blobAlreadyExists = !!sql.getValue("SELECT 1 FROM blobs WHERE blobId = ?", [blobId]);
@ -452,7 +501,11 @@ class ConsistencyChecks {
if (this.autoFix) {
const note = becca.getNote(noteId);
const blankContent = getBlankContent(false, type, mime);
note.setContent(blankContent);
if (!note) return;
if (blankContent) {
note.setContent(blankContent);
}
this.reloadNeeded = true;
@ -506,7 +559,7 @@ class ConsistencyChecks {
AND branches.isDeleted = 0`,
({parentNoteId}) => {
if (this.autoFix) {
const branchIds = sql.getColumn(`
const branchIds = sql.getColumn<string>(`
SELECT branchId
FROM branches
WHERE isDeleted = 0
@ -515,6 +568,8 @@ class ConsistencyChecks {
const branches = branchIds.map(branchId => becca.getBranch(branchId));
for (const branch of branches) {
if (!branch) continue;
// delete the old wrong branch
branch.markAsDeleted("parent-is-search");
@ -543,6 +598,7 @@ class ConsistencyChecks {
({attributeId}) => {
if (this.autoFix) {
const relation = becca.getAttribute(attributeId);
if (!relation) return;
relation.markAsDeleted();
this.reloadNeeded = true;
@ -563,6 +619,7 @@ class ConsistencyChecks {
({attributeId, type}) => {
if (this.autoFix) {
const attribute = becca.getAttribute(attributeId);
if (!attribute) return;
attribute.type = 'label';
attribute.save();
@ -584,6 +641,7 @@ class ConsistencyChecks {
({attributeId, noteId}) => {
if (this.autoFix) {
const attribute = becca.getAttribute(attributeId);
if (!attribute) return;
attribute.markAsDeleted();
this.reloadNeeded = true;
@ -605,6 +663,7 @@ class ConsistencyChecks {
({attributeId, targetNoteId}) => {
if (this.autoFix) {
const attribute = becca.getAttribute(attributeId);
if (!attribute) return;
attribute.markAsDeleted();
this.reloadNeeded = true;
@ -616,14 +675,14 @@ class ConsistencyChecks {
});
}
runEntityChangeChecks(entityName, key) {
runEntityChangeChecks(entityName: string, key: string) {
this.findAndFixIssues(`
SELECT ${key} as entityId
FROM ${entityName}
LEFT JOIN entity_changes ec ON ec.entityName = '${entityName}' AND ec.entityId = ${entityName}.${key}
WHERE ec.id IS NULL`,
({entityId}) => {
const entityRow = sql.getRow(`SELECT * FROM ${entityName} WHERE ${key} = ?`, [entityId]);
const entityRow = sql.getRow<EntityChange>(`SELECT * FROM ${entityName} WHERE ${key} = ?`, [entityId]);
if (this.autoFix) {
entityChangesService.putEntityChange({
@ -691,10 +750,10 @@ class ConsistencyChecks {
}
findWronglyNamedAttributes() {
const attrNames = sql.getColumn(`SELECT DISTINCT name FROM attributes`);
const attrNames = sql.getColumn<string>(`SELECT DISTINCT name FROM attributes`);
for (const origName of attrNames) {
const fixedName = sanitizeAttributeName(origName);
const fixedName = sanitizeAttributeName.sanitizeAttributeName(origName);
if (fixedName !== origName) {
if (this.autoFix) {
@ -721,7 +780,7 @@ class ConsistencyChecks {
findSyncIssues() {
const lastSyncedPush = parseInt(sql.getValue("SELECT value FROM options WHERE name = 'lastSyncedPush'"));
const maxEntityChangeId = sql.getValue("SELECT MAX(id) FROM entity_changes");
const maxEntityChangeId = sql.getValue<number>("SELECT MAX(id) FROM entity_changes");
if (lastSyncedPush > maxEntityChangeId) {
if (this.autoFix) {
@ -773,8 +832,8 @@ class ConsistencyChecks {
}
runDbDiagnostics() {
function getTableRowCount(tableName) {
const count = sql.getValue(`SELECT COUNT(1) FROM ${tableName}`);
function getTableRowCount(tableName: string) {
const count = sql.getValue<number>(`SELECT COUNT(1) FROM ${tableName}`);
return `${tableName}: ${count}`;
}
@ -810,7 +869,7 @@ class ConsistencyChecks {
}
}
function getBlankContent(isProtected, type, mime) {
function getBlankContent(isProtected: boolean, type: string, mime: string) {
if (isProtected) {
return null; // this is wrong for protected non-erased notes, but we cannot create a valid value without a password
}
@ -822,11 +881,11 @@ function getBlankContent(isProtected, type, mime) {
return ''; // empty string might be a wrong choice for some note types, but it's the best guess
}
function logFix(message) {
function logFix(message: string) {
log.info(`Consistency issue fixed: ${message}`);
}
function logError(message) {
function logError(message: string) {
log.info(`Consistency error: ${message}`);
}
@ -837,7 +896,7 @@ function runPeriodicChecks() {
consistencyChecks.runChecks();
}
async function runOnDemandChecks(autoFix) {
async function runOnDemandChecks(autoFix: boolean) {
const consistencyChecks = new ConsistencyChecks(autoFix);
await consistencyChecks.runChecks();
}

View File

@ -7,6 +7,8 @@ export interface EntityChange {
positions?: Record<string, number>;
hash: string;
utcDateChanged?: string;
utcDateModified?: string;
utcDateCreated?: string;
isSynced: boolean | 1 | 0;
isErased: boolean | 1 | 0;
componentId?: string | null;

View File

@ -1,11 +1,11 @@
"use strict";
const TurndownService = require('turndown');
const turndownPluginGfm = require('joplin-turndown-plugin-gfm');
import TurndownService = require('turndown');
import turndownPluginGfm = require('joplin-turndown-plugin-gfm');
let instance = null;
let instance: TurndownService | null = null;
function toMarkdown(content) {
function toMarkdown(content: string) {
if (instance === null) {
instance = new TurndownService({ codeBlockStyle: 'fenced' });
instance.use(turndownPluginGfm.gfm);
@ -14,6 +14,6 @@ function toMarkdown(content) {
return instance.turndown(content);
}
module.exports = {
export = {
toMarkdown
};

View File

@ -1,9 +1,12 @@
"use strict";
const utils = require('../utils');
const becca = require('../../becca/becca');
import utils = require('../utils');
import becca = require('../../becca/becca');
import TaskContext = require('../task_context');
import BBranch = require('../../becca/entities/bbranch');
import { Response } from 'express';
function exportToOpml(taskContext, branch, version, res) {
function exportToOpml(taskContext: TaskContext, branch: BBranch, version: string, res: Response) {
if (!['1.0', '2.0'].includes(version)) {
throw new Error(`Unrecognized OPML version ${version}`);
}
@ -12,9 +15,12 @@ function exportToOpml(taskContext, branch, version, res) {
const note = branch.getNote();
function exportNoteInner(branchId) {
function exportNoteInner(branchId: string) {
const branch = becca.getBranch(branchId);
if (!branch) { throw new Error("Unable to find branch."); }
const note = branch.getNote();
if (!note) { throw new Error("Unable to find note."); }
if (note.hasOwnedLabel('excludeFromExport')) {
return;
@ -24,13 +30,13 @@ function exportToOpml(taskContext, branch, version, res) {
if (opmlVersion === 1) {
const preparedTitle = escapeXmlAttribute(title);
const preparedContent = note.hasStringContent() ? prepareText(note.getContent()) : '';
const preparedContent = note.hasStringContent() ? prepareText(note.getContent() as string) : '';
res.write(`<outline title="${preparedTitle}" text="${preparedContent}">\n`);
}
else if (opmlVersion === 2) {
const preparedTitle = escapeXmlAttribute(title);
const preparedContent = note.hasStringContent() ? escapeXmlAttribute(note.getContent()) : '';
const preparedContent = note.hasStringContent() ? escapeXmlAttribute(note.getContent() as string) : '';
res.write(`<outline text="${preparedTitle}" _note="${preparedContent}">\n`);
}
@ -41,7 +47,9 @@ function exportToOpml(taskContext, branch, version, res) {
taskContext.increaseProgressCount();
for (const child of note.getChildBranches()) {
exportNoteInner(child.branchId);
if (child?.branchId) {
exportNoteInner(child.branchId);
}
}
res.write('</outline>');
@ -60,7 +68,9 @@ function exportToOpml(taskContext, branch, version, res) {
</head>
<body>`);
exportNoteInner(branch.branchId);
if (branch.branchId) {
exportNoteInner(branch.branchId);
}
res.write(`</body>
</opml>`);
@ -69,7 +79,7 @@ function exportToOpml(taskContext, branch, version, res) {
taskContext.taskSucceeded();
}
function prepareText(text) {
function prepareText(text: string) {
const newLines = text.replace(/(<p[^>]*>|<br\s*\/?>)/g, '\n')
.replace(/&nbsp;/g, ' '); // nbsp isn't in XML standard (only HTML)
@ -80,7 +90,7 @@ function prepareText(text) {
return escaped.replace(/\n/g, '&#10;');
}
function escapeXmlAttribute(text) {
function escapeXmlAttribute(text: string) {
return text.replace(/&/g, '&amp;')
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;')
@ -88,6 +98,6 @@ function escapeXmlAttribute(text) {
.replace(/'/g, '&apos;');
}
module.exports = {
export = {
exportToOpml
};

View File

@ -1,12 +1,15 @@
"use strict";
const mimeTypes = require('mime-types');
const html = require('html');
const utils = require('../utils');
const mdService = require('./md.js');
const becca = require('../../becca/becca');
import mimeTypes = require('mime-types');
import html = require('html');
import utils = require('../utils');
import mdService = require('./md');
import becca = require('../../becca/becca');
import TaskContext = require('../task_context');
import BBranch = require('../../becca/entities/bbranch');
import { Response } from 'express';
function exportSingleNote(taskContext, branch, format, res) {
function exportSingleNote(taskContext: TaskContext, branch: BBranch, format: "html" | "markdown", res: Response) {
const note = branch.getNote();
if (note.type === 'image' || note.type === 'file') {
@ -20,6 +23,9 @@ function exportSingleNote(taskContext, branch, format, res) {
let payload, extension, mime;
let content = note.getContent();
if (typeof content !== "string") {
throw new Error("Unsupported content type for export.");
}
if (note.type === 'text') {
if (format === 'html') {
@ -64,7 +70,7 @@ function exportSingleNote(taskContext, branch, format, res) {
taskContext.taskSucceeded();
}
function inlineAttachments(content) {
function inlineAttachments(content: string) {
content = content.replace(/src="[^"]*api\/images\/([a-zA-Z0-9_]+)\/?[^"]+"/g, (match, noteId) => {
const note = becca.getNote(noteId);
if (!note || !note.mime.startsWith('image/')) {
@ -119,6 +125,6 @@ function inlineAttachments(content) {
return content;
}
module.exports = {
export = {
exportSingleNote
};

View File

@ -1,33 +1,28 @@
"use strict";
const html = require('html');
const dateUtils = require('../date_utils');
const path = require('path');
const mimeTypes = require('mime-types');
const mdService = require('./md.js');
const packageInfo = require('../../../package.json');
const utils = require('../utils');
const protectedSessionService = require('../protected_session');
const sanitize = require("sanitize-filename");
const fs = require("fs");
const becca = require('../../becca/becca');
import html = require('html');
import dateUtils = require('../date_utils');
import path = require('path');
import mimeTypes = require('mime-types');
import mdService = require('./md');
import packageInfo = require('../../../package.json');
import utils = require('../utils');
import protectedSessionService = require('../protected_session');
import sanitize = require("sanitize-filename");
import fs = require("fs");
import becca = require('../../becca/becca');
const RESOURCE_DIR = require('../../services/resource_dir').RESOURCE_DIR;
const archiver = require('archiver');
const log = require('../log');
const TaskContext = require('../task_context');
const ValidationError = require('../../errors/validation_error');
const NoteMeta = require('../meta/note_meta');
const AttachmentMeta = require('../meta/attachment_meta');
const AttributeMeta = require('../meta/attribute_meta');
import archiver = require('archiver');
import log = require('../log');
import TaskContext = require('../task_context');
import ValidationError = require('../../errors/validation_error');
import NoteMeta = require('../meta/note_meta');
import AttachmentMeta = require('../meta/attachment_meta');
import AttributeMeta = require('../meta/attribute_meta');
import BBranch = require('../../becca/entities/bbranch');
import { Response } from 'express';
/**
* @param {TaskContext} taskContext
* @param {BBranch} branch
* @param {string} format - 'html' or 'markdown'
* @param {object} res - express response
* @param {boolean} setHeaders
*/
async function exportToZip(taskContext, branch, format, res, setHeaders = true) {
async function exportToZip(taskContext: TaskContext, branch: BBranch, format: "html" | "markdown", res: Response | fs.WriteStream, setHeaders = true) {
if (!['html', 'markdown'].includes(format)) {
throw new ValidationError(`Only 'html' and 'markdown' allowed as export format, '${format}' given`);
}
@ -36,15 +31,9 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
zlib: { level: 9 } // Sets the compression level.
});
/** @type {Object.<string, NoteMeta>} */
const noteIdToMeta = {};
const noteIdToMeta: Record<string, NoteMeta> = {};
/**
* @param {Object.<string, int>} existingFileNames
* @param {string} fileName
* @returns {string}
*/
function getUniqueFilename(existingFileNames, fileName) {
function getUniqueFilename(existingFileNames: Record<string, number>, fileName: string) {
const lcFileName = fileName.toLowerCase();
if (lcFileName in existingFileNames) {
@ -67,14 +56,7 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
}
}
/**
* @param {string|null} type
* @param {string} mime
* @param {string} baseFileName
* @param {Object.<string, int>} existingFileNames
* @return {string}
*/
function getDataFileName(type, mime, baseFileName, existingFileNames) {
function getDataFileName(type: string | null, mime: string, baseFileName: string, existingFileNames: Record<string, number>): string {
let fileName = baseFileName.trim();
if (fileName.length > 30) {
fileName = fileName.substr(0, 30).trim();
@ -115,13 +97,7 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
return getUniqueFilename(existingFileNames, fileName);
}
/**
* @param {BBranch} branch
* @param {NoteMeta} parentMeta
* @param {Object.<string, int>} existingFileNames
* @returns {NoteMeta|null}
*/
function createNoteMeta(branch, parentMeta, existingFileNames) {
function createNoteMeta(branch: BBranch, parentMeta: Partial<NoteMeta>, existingFileNames: Record<string, number>): NoteMeta | null {
const note = branch.getNote();
if (note.hasOwnedLabel('excludeFromExport')) {
@ -136,24 +112,26 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
baseFileName = baseFileName.substr(0, 200);
}
if (!parentMeta.notePath) { throw new Error("Missing parent note path."); }
const notePath = parentMeta.notePath.concat([note.noteId]);
if (note.noteId in noteIdToMeta) {
const fileName = getUniqueFilename(existingFileNames, `${baseFileName}.clone.${format === 'html' ? 'html' : 'md'}`);
const meta = new NoteMeta();
meta.isClone = true;
meta.noteId = note.noteId;
meta.notePath = notePath;
meta.title = note.getTitleOrProtected();
meta.prefix = branch.prefix;
meta.dataFileName = fileName;
meta.type = 'text'; // export will have text description
meta.format = format;
const meta: NoteMeta = {
isClone: true,
noteId: note.noteId,
notePath: notePath,
title: note.getTitleOrProtected(),
prefix: branch.prefix,
dataFileName: fileName,
type: 'text', // export will have text description
format: format
};
return meta;
}
const meta = new NoteMeta();
const meta: Partial<NoteMeta> = {};
meta.isClone = false;
meta.noteId = note.noteId;
meta.notePath = notePath;
@ -164,12 +142,14 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
meta.type = note.type;
meta.mime = note.mime;
meta.attributes = note.getOwnedAttributes().map(attribute => {
const attrMeta = new AttributeMeta();
attrMeta.type = attribute.type;
attrMeta.name = attribute.name;
attrMeta.value = attribute.value;
attrMeta.isInheritable = attribute.isInheritable;
attrMeta.position = attribute.position;
const attrMeta: AttributeMeta = {
type: attribute.type,
name: attribute.name,
value: attribute.value,
isInheritable: attribute.isInheritable,
position: attribute.position
};
return attrMeta;
});
@ -179,12 +159,12 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
meta.format = format;
}
noteIdToMeta[note.noteId] = meta;
noteIdToMeta[note.noteId] = meta as NoteMeta;
// sort children for having a stable / reproducible export format
note.sortChildren();
const childBranches = note.getChildBranches()
.filter(branch => branch.noteId !== '_hidden');
.filter(branch => branch?.noteId !== '_hidden');
const available = !note.isProtected || protectedSessionService.isProtectedSessionAvailable();
@ -196,18 +176,19 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
const attachments = note.getAttachments();
meta.attachments = attachments
.map(attachment => {
const attMeta = new AttachmentMeta();
attMeta.attachmentId = attachment.attachmentId;
attMeta.title = attachment.title;
attMeta.role = attachment.role;
attMeta.mime = attachment.mime;
attMeta.position = attachment.position;
attMeta.dataFileName = getDataFileName(
null,
attachment.mime,
baseFileName + "_" + attachment.title,
existingFileNames
);
const attMeta: AttachmentMeta = {
attachmentId: attachment.attachmentId,
title: attachment.title,
role: attachment.role,
mime: attachment.mime,
position: attachment.position,
dataFileName: getDataFileName(
null,
attachment.mime,
baseFileName + "_" + attachment.title,
existingFileNames
)
};
return attMeta;
});
@ -219,7 +200,9 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
const childExistingNames = {};
for (const childBranch of childBranches) {
const note = createNoteMeta(childBranch, meta, childExistingNames);
if (!childBranch) { continue; }
const note = createNoteMeta(childBranch, meta as NoteMeta, childExistingNames);
// can be undefined if export is disabled for this note
if (note) {
@ -228,18 +211,13 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
}
}
return meta;
return meta as NoteMeta;
}
/**
* @param {string} targetNoteId
* @param {NoteMeta} sourceMeta
* @return {string|null}
*/
function getNoteTargetUrl(targetNoteId, sourceMeta) {
function getNoteTargetUrl(targetNoteId: string, sourceMeta: NoteMeta): string | null {
const targetMeta = noteIdToMeta[targetNoteId];
if (!targetMeta) {
if (!targetMeta || !targetMeta.notePath || !sourceMeta.notePath) {
return null;
}
@ -256,24 +234,20 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
for (let i = 0; i < targetPath.length - 1; i++) {
const meta = noteIdToMeta[targetPath[i]];
url += `${encodeURIComponent(meta.dirFileName)}/`;
if (meta.dirFileName) {
url += `${encodeURIComponent(meta.dirFileName)}/`;
}
}
const meta = noteIdToMeta[targetPath[targetPath.length - 1]];
// link can target note which is only "folder-note" and as such, will not have a file in an export
url += encodeURIComponent(meta.dataFileName || meta.dirFileName);
url += encodeURIComponent(meta.dataFileName || meta.dirFileName || "");
return url;
}
/**
* @param {string} content
* @param {NoteMeta} noteMeta
* @return {string}
*/
function rewriteLinks(content, noteMeta) {
function rewriteLinks(content: string, noteMeta: NoteMeta): string {
content = content.replace(/src="[^"]*api\/images\/([a-zA-Z0-9_]+)\/[^"]*"/g, (match, targetNoteId) => {
const url = getNoteTargetUrl(targetNoteId, noteMeta);
@ -300,10 +274,10 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
return content;
function findAttachment(targetAttachmentId) {
function findAttachment(targetAttachmentId: string) {
let url;
const attachmentMeta = noteMeta.attachments.find(attMeta => attMeta.attachmentId === targetAttachmentId);
const attachmentMeta = (noteMeta.attachments || []).find(attMeta => attMeta.attachmentId === targetAttachmentId);
if (attachmentMeta) {
// easy job here, because attachment will be in the same directory as the note's data file.
url = attachmentMeta.dataFileName;
@ -314,21 +288,17 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
}
}
/**
* @param {string} title
* @param {string|Buffer} content
* @param {NoteMeta} noteMeta
* @return {string|Buffer}
*/
function prepareContent(title, content, noteMeta) {
if (['html', 'markdown'].includes(noteMeta.format)) {
function prepareContent(title: string, content: string | Buffer, noteMeta: NoteMeta): string | Buffer {
if (['html', 'markdown'].includes(noteMeta?.format || "")) {
content = content.toString();
content = rewriteLinks(content, noteMeta);
}
if (noteMeta.format === 'html') {
if (noteMeta.format === 'html' && typeof content === "string") {
if (!content.substr(0, 100).toLowerCase().includes("<html")) {
if (!noteMeta?.notePath?.length) { throw new Error("Missing note path."); }
const cssUrl = `${"../".repeat(noteMeta.notePath.length - 1)}style.css`;
const htmlTitle = utils.escapeHtml(title);
@ -354,7 +324,7 @@ async function exportToZip(taskContext, branch, format, res, setHeaders = true)
return content.length < 100_000
? html.prettyPrint(content, {indent_size: 2})
: content;
} else if (noteMeta.format === 'markdown') {
} else if (noteMeta.format === 'markdown' && typeof content === "string") {
let markdownContent = mdService.toMarkdown(content);
if (markdownContent.trim().length > 0 && !markdownContent.startsWith("# ")) {
@ -368,17 +338,17 @@ ${markdownContent}`;
}
}
/**
* @param {NoteMeta} noteMeta
* @param {string} filePathPrefix
*/
function saveNote(noteMeta, filePathPrefix) {
function saveNote(noteMeta: NoteMeta, filePathPrefix: string) {
log.info(`Exporting note '${noteMeta.noteId}'`);
if (!noteMeta.noteId || !noteMeta.title) {
throw new Error("Missing note meta.");
}
if (noteMeta.isClone) {
const targetUrl = getNoteTargetUrl(noteMeta.noteId, noteMeta);
let content = `<p>This is a clone of a note. Go to its <a href="${targetUrl}">primary location</a>.</p>`;
let content: string | Buffer = `<p>This is a clone of a note. Go to its <a href="${targetUrl}">primary location</a>.</p>`;
content = prepareContent(noteMeta.title, content, noteMeta);
@ -388,6 +358,8 @@ ${markdownContent}`;
}
const note = becca.getNote(noteMeta.noteId);
if (!note) { throw new Error("Unable to find note."); }
if (!note.utcDateModified) { throw new Error("Unable to find modification date."); }
if (noteMeta.dataFileName) {
const content = prepareContent(noteMeta.title, note.getContent(), noteMeta);
@ -400,7 +372,9 @@ ${markdownContent}`;
taskContext.increaseProgressCount();
for (const attachmentMeta of noteMeta.attachments) {
for (const attachmentMeta of noteMeta.attachments || []) {
if (!attachmentMeta.attachmentId) { continue; }
const attachment = note.getAttachmentById(attachmentMeta.attachmentId);
const content = attachment.getContent();
@ -410,29 +384,25 @@ ${markdownContent}`;
});
}
if (noteMeta.children?.length > 0) {
if (noteMeta.children?.length || 0 > 0) {
const directoryPath = filePathPrefix + noteMeta.dirFileName;
// create directory
archive.append('', { name: `${directoryPath}/`, date: dateUtils.parseDateTime(note.utcDateModified) });
for (const childMeta of noteMeta.children) {
for (const childMeta of noteMeta.children || []) {
saveNote(childMeta, `${directoryPath}/`);
}
}
}
/**
* @param {NoteMeta} rootMeta
* @param {NoteMeta} navigationMeta
*/
function saveNavigation(rootMeta, navigationMeta) {
function saveNavigationInner(meta) {
function saveNavigation(rootMeta: NoteMeta, navigationMeta: NoteMeta) {
function saveNavigationInner(meta: NoteMeta) {
let html = '<li>';
const escapedTitle = utils.escapeHtml(`${meta.prefix ? `${meta.prefix} - ` : ''}${meta.title}`);
if (meta.dataFileName) {
if (meta.dataFileName && meta.noteId) {
const targetUrl = getNoteTargetUrl(meta.noteId, rootMeta);
html += `<a href="${targetUrl}" target="detail">${escapedTitle}</a>`;
@ -470,16 +440,12 @@ ${markdownContent}`;
archive.append(prettyHtml, { name: navigationMeta.dataFileName });
}
/**
* @param {NoteMeta} rootMeta
* @param {NoteMeta} indexMeta
*/
function saveIndex(rootMeta, indexMeta) {
function saveIndex(rootMeta: NoteMeta, indexMeta: NoteMeta) {
let firstNonEmptyNote;
let curMeta = rootMeta;
while (!firstNonEmptyNote) {
if (curMeta.dataFileName) {
if (curMeta.dataFileName && curMeta.noteId) {
firstNonEmptyNote = getNoteTargetUrl(curMeta.noteId, rootMeta);
}
@ -506,17 +472,13 @@ ${markdownContent}`;
archive.append(fullHtml, { name: indexMeta.dataFileName });
}
/**
* @param {NoteMeta} rootMeta
* @param {NoteMeta} cssMeta
*/
function saveCss(rootMeta, cssMeta) {
function saveCss(rootMeta: NoteMeta, cssMeta: NoteMeta) {
const cssContent = fs.readFileSync(`${RESOURCE_DIR}/libraries/ckeditor/ckeditor-content.css`);
archive.append(cssContent, { name: cssMeta.dataFileName });
}
const existingFileNames = format === 'html' ? ['navigation', 'index'] : [];
const existingFileNames: Record<string, number> = format === 'html' ? {'navigation': 0, 'index': 1} : {};
const rootMeta = createNoteMeta(branch, { notePath: [] }, existingFileNames);
const metaFile = {
@ -525,7 +487,9 @@ ${markdownContent}`;
files: [ rootMeta ]
};
let navigationMeta, indexMeta, cssMeta;
let navigationMeta: NoteMeta | null = null;
let indexMeta: NoteMeta | null = null;
let cssMeta: NoteMeta | null = null;
if (format === 'html') {
navigationMeta = {
@ -552,7 +516,7 @@ ${markdownContent}`;
for (const noteMeta of Object.values(noteIdToMeta)) {
// filter out relations which are not inside this export
noteMeta.attributes = noteMeta.attributes.filter(attr => {
noteMeta.attributes = (noteMeta.attributes || []).filter(attr => {
if (attr.type !== 'relation') {
return true;
} else if (attr.value in noteIdToMeta) {
@ -567,7 +531,9 @@ ${markdownContent}`;
}
if (!rootMeta) { // corner case of disabled export for exported note
res.sendStatus(400);
if ("sendStatus" in res) {
res.sendStatus(400);
}
return;
}
@ -578,6 +544,10 @@ ${markdownContent}`;
saveNote(rootMeta, '');
if (format === 'html') {
if (!navigationMeta || !indexMeta || !cssMeta) {
throw new Error("Missing meta.");
}
saveNavigation(rootMeta, navigationMeta);
saveIndex(rootMeta, indexMeta);
saveCss(rootMeta, cssMeta);
@ -586,7 +556,7 @@ ${markdownContent}`;
const note = branch.getNote();
const zipFileName = `${branch.prefix ? `${branch.prefix} - ` : ""}${note.getTitleOrProtected()}.zip`;
if (setHeaders) {
if (setHeaders && "setHeader" in res) {
res.setHeader('Content-Disposition', utils.getContentDisposition(zipFileName));
res.setHeader('Content-Type', 'application/zip');
}
@ -597,7 +567,7 @@ ${markdownContent}`;
taskContext.taskSucceeded();
}
async function exportToZipFile(noteId, format, zipFilePath) {
async function exportToZipFile(noteId: string, format: "markdown" | "html", zipFilePath: string) {
const fileOutputStream = fs.createWriteStream(zipFilePath);
const taskContext = new TaskContext('no-progress-reporting');
@ -612,7 +582,7 @@ async function exportToZipFile(noteId, format, zipFilePath) {
log.info(`Exported '${noteId}' with format '${format}' to '${zipFilePath}'`);
}
module.exports = {
export = {
exportToZip,
exportToZipFile
};

View File

@ -1,19 +1,19 @@
"use strict";
const becca = require('../becca/becca');
const log = require('./log');
const protectedSessionService = require('./protected_session');
const noteService = require('./notes');
const optionService = require('./options');
const sql = require('./sql');
const jimp = require('jimp');
const imageType = require('image-type');
const sanitizeFilename = require('sanitize-filename');
const isSvg = require('is-svg');
const isAnimated = require('is-animated');
const htmlSanitizer = require('./html_sanitizer');
import becca = require('../becca/becca');
import log = require('./log');
import protectedSessionService = require('./protected_session');
import noteService = require('./notes');
import optionService = require('./options');
import sql = require('./sql');
import jimp = require('jimp');
import imageType = require('image-type');
import sanitizeFilename = require('sanitize-filename');
import isSvg = require('is-svg');
import isAnimated = require('is-animated');
import htmlSanitizer = require('./html_sanitizer');
async function processImage(uploadBuffer, originalName, shrinkImageSwitch) {
async function processImage(uploadBuffer: Buffer, originalName: string, shrinkImageSwitch: boolean) {
const compressImages = optionService.getOptionBool("compressImages");
const origImageFormat = getImageType(uploadBuffer);
@ -44,7 +44,7 @@ async function processImage(uploadBuffer, originalName, shrinkImageSwitch) {
};
}
function getImageType(buffer) {
function getImageType(buffer: Buffer) {
if (isSvg(buffer)) {
return {
ext: 'svg'
@ -57,18 +57,19 @@ function getImageType(buffer) {
}
}
function getImageMimeFromExtension(ext) {
function getImageMimeFromExtension(ext: string) {
ext = ext.toLowerCase();
return `image/${ext === 'svg' ? 'svg+xml' : ext}`;
}
function updateImage(noteId, uploadBuffer, originalName) {
function updateImage(noteId: string, uploadBuffer: Buffer, originalName: string) {
log.info(`Updating image ${noteId}: ${originalName}`);
originalName = htmlSanitizer.sanitize(originalName);
const note = becca.getNote(noteId);
if (!note) { throw new Error("Unable to find note."); }
note.saveRevision();
@ -85,7 +86,7 @@ function updateImage(noteId, uploadBuffer, originalName) {
});
}
function saveImage(parentNoteId, uploadBuffer, originalName, shrinkImageSwitch, trimFilename = false) {
function saveImage(parentNoteId: string, uploadBuffer: Buffer, originalName: string, shrinkImageSwitch: boolean, trimFilename = false) {
log.info(`Saving image ${originalName} into parent ${parentNoteId}`);
if (trimFilename && originalName.length > 40) {
@ -95,6 +96,7 @@ function saveImage(parentNoteId, uploadBuffer, originalName, shrinkImageSwitch,
const fileName = sanitizeFilename(originalName);
const parentNote = becca.getNote(parentNoteId);
if (!parentNote) { throw new Error("Unable to find parent note."); }
const {note} = noteService.createNewNote({
parentNoteId,
@ -131,7 +133,7 @@ function saveImage(parentNoteId, uploadBuffer, originalName, shrinkImageSwitch,
};
}
function saveImageToAttachment(noteId, uploadBuffer, originalName, shrinkImageSwitch, trimFilename = false) {
function saveImageToAttachment(noteId: string, uploadBuffer: Buffer, originalName: string, shrinkImageSwitch?: boolean, trimFilename = false) {
log.info(`Saving image '${originalName}' as attachment into note '${noteId}'`);
if (trimFilename && originalName.length > 40) {
@ -160,9 +162,10 @@ function saveImageToAttachment(noteId, uploadBuffer, originalName, shrinkImageSw
}, 5000);
// resizing images asynchronously since JIMP does not support sync operation
processImage(uploadBuffer, originalName, shrinkImageSwitch).then(({buffer, imageFormat}) => {
processImage(uploadBuffer, originalName, !!shrinkImageSwitch).then(({buffer, imageFormat}) => {
sql.transactional(() => {
// re-read, might be changed in the meantime
if (!attachment.attachmentId) { throw new Error("Missing attachment ID."); }
attachment = becca.getAttachmentOrThrow(attachment.attachmentId);
attachment.mime = getImageMimeFromExtension(imageFormat.ext);
@ -179,7 +182,7 @@ function saveImageToAttachment(noteId, uploadBuffer, originalName, shrinkImageSw
return attachment;
}
async function shrinkImage(buffer, originalName) {
async function shrinkImage(buffer: Buffer, originalName: string) {
let jpegQuality = optionService.getOptionInt('imageJpegQuality', 0);
if (jpegQuality < 10 || jpegQuality > 100) {
@ -190,7 +193,7 @@ async function shrinkImage(buffer, originalName) {
try {
finalImageBuffer = await resize(buffer, jpegQuality);
}
catch (e) {
catch (e: any) {
log.error(`Failed to resize image '${originalName}', stack: ${e.stack}`);
finalImageBuffer = buffer;
@ -205,7 +208,7 @@ async function shrinkImage(buffer, originalName) {
return finalImageBuffer;
}
async function resize(buffer, quality) {
async function resize(buffer: Buffer, quality: number) {
const imageMaxWidthHeight = optionService.getOptionInt('imageMaxWidthHeight');
const start = Date.now();
@ -231,7 +234,7 @@ async function resize(buffer, quality) {
return resultBuffer;
}
module.exports = {
export = {
saveImage,
saveImageToAttachment,
updateImage

View File

@ -0,0 +1,5 @@
/**
 * An uploaded file as passed into the import services.
 * Shape mirrors the multer-style upload object — TODO confirm against the upload route callers.
 */
export interface File {
    /** Original file name as supplied by the client (used for title/MIME detection). */
    originalname: string;
    /** MIME type reported for the upload. */
    mimetype: string;
    /** Raw file content; may arrive as a string for textual imports. */
    buffer: string | Buffer;
}

View File

@ -1,20 +1,23 @@
const sax = require("sax");
const stream = require('stream');
const {Throttle} = require('stream-throttle');
const log = require('../log');
const utils = require('../utils');
const sql = require('../sql');
const noteService = require('../notes');
const imageService = require('../image.js');
const protectedSessionService = require('../protected_session');
const htmlSanitizer = require('../html_sanitizer');
const {sanitizeAttributeName} = require('../sanitize_attribute_name');
import sax = require("sax");
import stream = require('stream');
import { Throttle } from 'stream-throttle';
import log = require('../log');
import utils = require('../utils');
import sql = require('../sql');
import noteService = require('../notes');
import imageService = require('../image');
import protectedSessionService = require('../protected_session');
import htmlSanitizer = require('../html_sanitizer');
import sanitizeAttributeName = require('../sanitize_attribute_name');
import TaskContext = require("../task_context");
import BNote = require("../../becca/entities/bnote");
import { File } from "./common";
/**
* date format is e.g. 20181121T193703Z or 2013-04-14T16:19:00.000Z (Mac evernote, see #3496)
* @returns trilium date format, e.g. 2013-04-14 16:19:00.000Z
*/
function parseDate(text) {
function parseDate(text: string) {
// convert ISO format to the "20181121T193703Z" format
text = text.replace(/[-:]/g, "");
@ -25,10 +28,34 @@ function parseDate(text) {
return text;
}
let note = {};
let resource;
/**
 * An attribute collected while parsing the ENEX stream,
 * later applied to the created note via noteEntity.addAttribute().
 */
interface Attribute {
    /** Attribute kind; the ENEX importer only produces 'label' values here. */
    type: string;
    /** Sanitized attribute name (see sanitizeAttributeName usage). */
    name: string;
    /** Attribute value; empty string for plain tags. */
    value: string;
}
function importEnex(taskContext, file, parentNote) {
/**
 * A binary resource (image or file) embedded in an ENEX note.
 */
interface Resource {
    /** Display title, e.g. taken from the resource's file-name attribute. */
    title: string;
    /** Base64 string while parsing; decoded to a Buffer before saving (see utils.fromBase64 call). */
    content?: Buffer | string;
    /** MIME type; defaults to "application/octet-stream" when absent. */
    mime?: string;
    /** Labels attached to the resource's note. */
    attributes: Attribute[];
}
/**
 * In-memory representation of a single ENEX <note> element.
 * Built up incrementally by the SAX handlers, hence the runtime type is Partial<Note>.
 */
interface Note {
    /** Note title from the <title> element. */
    title: string;
    /** Labels collected from note-attributes and <tag> elements. */
    attributes: Attribute[];
    /** Creation time converted to Trilium's UTC date format (see parseDate). */
    utcDateCreated: string;
    /** Last-modified time converted to Trilium's UTC date format. */
    utcDateModified: string;
    noteId: string;
    blobId: string;
    /** HTML content extracted from the embedded <en-note> document. */
    content: string;
    /** Embedded resources referenced from the note content via <en-media>. */
    resources: Resource[]
}
let note: Partial<Note> = {};
let resource: Resource;
function importEnex(taskContext: TaskContext, file: File, parentNote: BNote) {
const saxStream = sax.createStream(true);
const rootNoteTitle = file.originalname.toLowerCase().endsWith(".enex")
@ -45,7 +72,7 @@ function importEnex(taskContext, file, parentNote) {
isProtected: parentNote.isProtected && protectedSessionService.isProtectedSessionAvailable(),
}).note;
function extractContent(content) {
function extractContent(content: string) {
const openingNoteIndex = content.indexOf('<en-note>');
if (openingNoteIndex !== -1) {
@ -90,7 +117,7 @@ function importEnex(taskContext, file, parentNote) {
}
const path = [];
const path: string[] = [];
function getCurrentTag() {
if (path.length >= 1) {
@ -108,8 +135,8 @@ function importEnex(taskContext, file, parentNote) {
// unhandled errors will throw, since this is a proper node event emitter.
log.error(`error when parsing ENEX file: ${e}`);
// clear the error
this._parser.error = null;
this._parser.resume();
(saxStream._parser as any).error = null;
saxStream._parser.resume();
});
saxStream.on("text", text => {
@ -123,13 +150,15 @@ function importEnex(taskContext, file, parentNote) {
labelName = 'pageUrl';
}
labelName = sanitizeAttributeName(labelName);
labelName = sanitizeAttributeName.sanitizeAttributeName(labelName || "");
note.attributes.push({
type: 'label',
name: labelName,
value: text
});
if (note.attributes) {
note.attributes.push({
type: 'label',
name: labelName,
value: text
});
}
}
else if (previousTag === 'resource-attributes') {
if (currentTag === 'file-name') {
@ -169,10 +198,10 @@ function importEnex(taskContext, file, parentNote) {
note.utcDateCreated = parseDate(text);
} else if (currentTag === 'updated') {
note.utcDateModified = parseDate(text);
} else if (currentTag === 'tag') {
} else if (currentTag === 'tag' && note.attributes) {
note.attributes.push({
type: 'label',
name: sanitizeAttributeName(text),
name: sanitizeAttributeName.sanitizeAttributeName(text),
value: ''
})
}
@ -201,11 +230,13 @@ function importEnex(taskContext, file, parentNote) {
attributes: []
};
note.resources.push(resource);
if (note.resources) {
note.resources.push(resource);
}
}
});
function updateDates(note, utcDateCreated, utcDateModified) {
function updateDates(note: BNote, utcDateCreated?: string, utcDateModified?: string) {
// it's difficult to force custom dateCreated and dateModified to Note entity, so we do it post-creation with SQL
sql.execute(`
UPDATE notes
@ -227,6 +258,10 @@ function importEnex(taskContext, file, parentNote) {
// make a copy because stream continues with the next call and note gets overwritten
let {title, content, attributes, resources, utcDateCreated, utcDateModified} = note;
if (!title || !content) {
throw new Error("Missing title or content for note.");
}
content = extractContent(content);
const noteEntity = noteService.createNewNote({
@ -239,7 +274,7 @@ function importEnex(taskContext, file, parentNote) {
isProtected: parentNote.isProtected && protectedSessionService.isProtectedSessionAvailable(),
}).note;
for (const attr of attributes) {
for (const attr of attributes || []) {
noteEntity.addAttribute(attr.type, attr.name, attr.value);
}
@ -249,12 +284,14 @@ function importEnex(taskContext, file, parentNote) {
taskContext.increaseProgressCount();
for (const resource of resources) {
for (const resource of resources || []) {
if (!resource.content) {
continue;
}
resource.content = utils.fromBase64(resource.content);
if (typeof resource.content === "string") {
resource.content = utils.fromBase64(resource.content);
}
const hash = utils.md5(resource.content);
@ -273,6 +310,10 @@ function importEnex(taskContext, file, parentNote) {
resource.mime = resource.mime || "application/octet-stream";
const createFileNote = () => {
if (typeof resource.content !== "string") {
throw new Error("Missing or wrong content type for resource.");
}
const resourceNote = noteService.createNewNote({
parentNoteId: noteEntity.noteId,
title: resource.title,
@ -292,7 +333,7 @@ function importEnex(taskContext, file, parentNote) {
const resourceLink = `<a href="#root/${resourceNote.noteId}">${utils.escapeHtml(resource.title)}</a>`;
content = content.replace(mediaRegex, resourceLink);
content = (content || "").replace(mediaRegex, resourceLink);
};
if (resource.mime && resource.mime.startsWith('image/')) {
@ -301,7 +342,7 @@ function importEnex(taskContext, file, parentNote) {
? resource.title
: `image.${resource.mime.substr(6)}`; // default if real name is not present
const attachment = imageService.saveImageToAttachment(noteEntity.noteId, resource.content, originalName, taskContext.data.shrinkImages);
const attachment = imageService.saveImageToAttachment(noteEntity.noteId, resource.content, originalName, !!taskContext.data?.shrinkImages);
const encodedTitle = encodeURIComponent(attachment.title);
const url = `api/attachments/${attachment.attachmentId}/image/${encodedTitle}`;
@ -314,7 +355,7 @@ function importEnex(taskContext, file, parentNote) {
// otherwise the image would be removed since no note would include it
content += imageLink;
}
} catch (e) {
} catch (e: any) {
log.error(`error when saving image from ENEX file: ${e.message}`);
createFileNote();
}
@ -368,4 +409,4 @@ function importEnex(taskContext, file, parentNote) {
});
}
module.exports = { importEnex };
export = { importEnex };

View File

@ -1,18 +0,0 @@
"use strict";
const marked = require("marked");
const htmlSanitizer = require('../html_sanitizer');
const importUtils = require('./utils');
function renderToHtml(content, title) {
const html = marked.parse(content, {
mangle: false,
headerIds: false
});
const h1Handled = importUtils.handleH1(html, title); // h1 handling needs to come before sanitization
return htmlSanitizer.sanitize(h1Handled);
}
module.exports = {
renderToHtml
};

View File

@ -0,0 +1,17 @@
"use strict";
import marked = require("marked");
import htmlSanitizer = require('../html_sanitizer');
import importUtils = require('./utils');
/**
 * Renders Markdown source into sanitized HTML.
 *
 * @param content Markdown text to convert.
 * @param title note title, used to strip a duplicate leading H1.
 * @returns sanitized HTML string.
 */
function renderToHtml(content: string, title: string) {
    // marked.parse() is typed string | Promise<string>; with async: false it is
    // synchronous, hence the cast.
    const rendered = marked.parse(content, { async: false }) as string;
    // h1 handling needs to come before sanitization
    const withoutDuplicateH1 = importUtils.handleH1(rendered, title);
    return htmlSanitizer.sanitize(withoutDuplicateH1);
}
export = {
renderToHtml
};

View File

@ -1,9 +1,10 @@
"use strict";
const mimeTypes = require('mime-types');
const path = require('path');
import mimeTypes = require('mime-types');
import path = require('path');
import { TaskData } from '../task_context_interface';
const CODE_MIME_TYPES = {
const CODE_MIME_TYPES: Record<string, boolean | string> = {
'text/plain': true,
'text/x-csrc': true,
'text/x-c++src': true,
@ -44,7 +45,7 @@ const CODE_MIME_TYPES = {
};
// extensions missing in mime-db
const EXTENSION_TO_MIME = {
const EXTENSION_TO_MIME: Record<string, string> = {
".c": "text/x-csrc",
".cs": "text/x-csharp",
".clj": "text/x-clojure",
@ -65,7 +66,7 @@ const EXTENSION_TO_MIME = {
};
/** @returns false if MIME is not detected */
function getMime(fileName) {
function getMime(fileName: string) {
if (fileName.toLowerCase() === 'dockerfile') {
return "text/x-dockerfile";
}
@ -79,7 +80,7 @@ function getMime(fileName) {
return mimeTypes.lookup(fileName);
}
function getType(options, mime) {
function getType(options: TaskData, mime: string) {
mime = mime ? mime.toLowerCase() : '';
if (options.textImportedAsText && (mime === 'text/html' || ['text/markdown', 'text/x-markdown'].includes(mime))) {
@ -96,18 +97,20 @@ function getType(options, mime) {
}
}
function normalizeMimeType(mime) {
function normalizeMimeType(mime: string) {
mime = mime ? mime.toLowerCase() : '';
const mappedMime = CODE_MIME_TYPES[mime];
if (!(mime in CODE_MIME_TYPES) || CODE_MIME_TYPES[mime] === true) {
if (mappedMime === true) {
return mime;
} else if (typeof mappedMime === "string") {
return mappedMime;
}
else {
return CODE_MIME_TYPES[mime];
}
return undefined;
}
module.exports = {
export = {
getMime,
getType,
normalizeMimeType

View File

@ -1,20 +1,37 @@
"use strict";
const noteService = require('../../services/notes');
const parseString = require('xml2js').parseString;
const protectedSessionService = require('../protected_session');
const htmlSanitizer = require('../html_sanitizer');
import noteService = require('../../services/notes');
import xml2js = require("xml2js");
import protectedSessionService = require('../protected_session');
import htmlSanitizer = require('../html_sanitizer');
import TaskContext = require('../task_context');
import BNote = require('../../becca/entities/bnote');
const parseString = xml2js.parseString;
/**
* @param {TaskContext} taskContext
* @param {Buffer} fileBuffer
* @param {BNote} parentNote
* @returns {Promise<*[]|*>}
*/
async function importOpml(taskContext, fileBuffer, parentNote) {
const xml = await new Promise(function(resolve, reject)
/** Root object produced by xml2js for an OPML document. */
interface OpmlXml {
    opml: OpmlBody;
}

/** The <opml> element; `$` carries its XML attributes (xml2js convention). */
interface OpmlBody {
    $: {
        /** OPML format version string, parsed with parseInt by the importer. */
        version: string
    }
    /** Top-level <outline> elements of the document body. */
    body: OpmlOutline[]
}

/**
 * A single <outline> element. `$` carries its XML attributes;
 * which of title/text/_note is used depends on the OPML version — see importOutline.
 */
interface OpmlOutline {
    $: {
        title: string;
        text: string;
        _note: string;
    };
    /** Nested child outlines, imported recursively. */
    outline: OpmlOutline[];
}
async function importOpml(taskContext: TaskContext, fileBuffer: Buffer, parentNote: BNote) {
const xml = await new Promise<OpmlXml>(function(resolve, reject)
{
parseString(fileBuffer, function (err, result) {
parseString(fileBuffer, function (err: any, result: OpmlXml) {
if (err) {
reject(err);
}
@ -30,7 +47,7 @@ async function importOpml(taskContext, fileBuffer, parentNote) {
const opmlVersion = parseInt(xml.opml.$.version);
function importOutline(outline, parentNoteId) {
function importOutline(outline: OpmlOutline, parentNoteId: string) {
let title, content;
if (opmlVersion === 1) {
@ -83,7 +100,7 @@ async function importOpml(taskContext, fileBuffer, parentNote) {
return returnNote;
}
function toHtml(text) {
function toHtml(text: string) {
if (!text) {
return '';
}
@ -91,6 +108,6 @@ function toHtml(text) {
return `<p>${text.replace(/(?:\r\n|\r|\n)/g, '</p><p>')}</p>`;
}
module.exports = {
export = {
importOpml
};

View File

@ -1,18 +1,22 @@
"use strict";
const noteService = require('../../services/notes');
const imageService = require('../../services/image.js');
const protectedSessionService = require('../protected_session');
const markdownService = require('./markdown.js');
const mimeService = require('./mime.js');
const utils = require('../../services/utils');
const importUtils = require('./utils');
const htmlSanitizer = require('../html_sanitizer');
import BNote = require("../../becca/entities/bnote");
import TaskContext = require("../task_context");
function importSingleFile(taskContext, file, parentNote) {
import noteService = require('../../services/notes');
import imageService = require('../../services/image');
import protectedSessionService = require('../protected_session');
import markdownService = require('./markdown');
import mimeService = require('./mime');
import utils = require('../../services/utils');
import importUtils = require('./utils');
import htmlSanitizer = require('../html_sanitizer');
import { File } from "./common";
function importSingleFile(taskContext: TaskContext, file: File, parentNote: BNote) {
const mime = mimeService.getMime(file.originalname) || file.mimetype;
if (taskContext.data.textImportedAsText) {
if (taskContext?.data?.textImportedAsText) {
if (mime === 'text/html') {
return importHtml(taskContext, file, parentNote);
} else if (['text/markdown', 'text/x-markdown'].includes(mime)) {
@ -22,7 +26,7 @@ function importSingleFile(taskContext, file, parentNote) {
}
}
if (taskContext.data.codeImportedAsCode && mimeService.getType(taskContext.data, mime) === 'code') {
if (taskContext?.data?.codeImportedAsCode && mimeService.getType(taskContext.data, mime) === 'code') {
return importCodeNote(taskContext, file, parentNote);
}
@ -33,15 +37,21 @@ function importSingleFile(taskContext, file, parentNote) {
return importFile(taskContext, file, parentNote);
}
function importImage(file, parentNote, taskContext) {
const {note} = imageService.saveImage(parentNote.noteId, file.buffer, file.originalname, taskContext.data.shrinkImages);
function importImage(file: File, parentNote: BNote, taskContext: TaskContext) {
if (typeof file.buffer === "string") {
throw new Error("Invalid file content for image.");
}
const {note} = imageService.saveImage(parentNote.noteId, file.buffer, file.originalname, !!taskContext.data?.shrinkImages);
taskContext.increaseProgressCount();
return note;
}
function importFile(taskContext, file, parentNote) {
function importFile(taskContext: TaskContext, file: File, parentNote: BNote) {
if (typeof file.buffer !== "string") {
throw new Error("Invalid file content for text.");
}
const originalName = file.originalname;
const {note} = noteService.createNewNote({
@ -60,8 +70,8 @@ function importFile(taskContext, file, parentNote) {
return note;
}
function importCodeNote(taskContext, file, parentNote) {
const title = utils.getNoteTitle(file.originalname, taskContext.data.replaceUnderscoresWithSpaces);
function importCodeNote(taskContext: TaskContext, file: File, parentNote: BNote) {
const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const content = file.buffer.toString("utf-8");
const detectedMime = mimeService.getMime(file.originalname) || file.mimetype;
const mime = mimeService.normalizeMimeType(detectedMime);
@ -80,8 +90,8 @@ function importCodeNote(taskContext, file, parentNote) {
return note;
}
function importPlainText(taskContext, file, parentNote) {
const title = utils.getNoteTitle(file.originalname, taskContext.data.replaceUnderscoresWithSpaces);
function importPlainText(taskContext: TaskContext, file: File, parentNote: BNote) {
const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const plainTextContent = file.buffer.toString("utf-8");
const htmlContent = convertTextToHtml(plainTextContent);
@ -99,7 +109,7 @@ function importPlainText(taskContext, file, parentNote) {
return note;
}
function convertTextToHtml(text) {
function convertTextToHtml(text: string) {
// 1: Plain Text Search
text = text.replace(/&/g, "&amp;").
replace(/</g, "&lt;").
@ -117,13 +127,13 @@ function convertTextToHtml(text) {
return text;
}
function importMarkdown(taskContext, file, parentNote) {
const title = utils.getNoteTitle(file.originalname, taskContext.data.replaceUnderscoresWithSpaces);
function importMarkdown(taskContext: TaskContext, file: File, parentNote: BNote) {
const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const markdownContent = file.buffer.toString("utf-8");
let htmlContent = markdownService.renderToHtml(markdownContent, title);
if (taskContext.data.safeImport) {
if (taskContext.data?.safeImport) {
htmlContent = htmlSanitizer.sanitize(htmlContent);
}
@ -141,11 +151,11 @@ function importMarkdown(taskContext, file, parentNote) {
return note;
}
function importHtml(taskContext, file, parentNote) {
const title = utils.getNoteTitle(file.originalname, taskContext.data.replaceUnderscoresWithSpaces);
function importHtml(taskContext: TaskContext, file: File, parentNote: BNote) {
const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
let content = file.buffer.toString("utf-8");
if (taskContext.data.safeImport) {
if (taskContext?.data?.safeImport) {
content = htmlSanitizer.sanitize(content);
}
@ -165,17 +175,11 @@ function importHtml(taskContext, file, parentNote) {
return note;
}
/**
* @param {TaskContext} taskContext
* @param file
* @param {BNote} parentNote
* @returns {BNote}
*/
function importAttachment(taskContext, file, parentNote) {
function importAttachment(taskContext: TaskContext, file: File, parentNote: BNote) {
const mime = mimeService.getMime(file.originalname) || file.mimetype;
if (mime.startsWith("image/")) {
imageService.saveImageToAttachment(parentNote.noteId, file.buffer, file.originalname, taskContext.data.shrinkImages);
if (mime.startsWith("image/") && typeof file.buffer !== "string") {
imageService.saveImageToAttachment(parentNote.noteId, file.buffer, file.originalname, taskContext.data?.shrinkImages);
taskContext.increaseProgressCount();
} else {
@ -190,7 +194,7 @@ function importAttachment(taskContext, file, parentNote) {
}
}
module.exports = {
export = {
importSingleFile,
importAttachment
};

View File

@ -1,6 +1,6 @@
"use strict";
function handleH1(content, title) {
function handleH1(content: string, title: string) {
content = content.replace(/<h1>([^<]*)<\/h1>/gi, (match, text) => {
if (title.trim() === text.trim()) {
return ""; // remove whole H1 tag
@ -11,6 +11,6 @@ function handleH1(content, title) {
return content;
}
module.exports = {
export = {
handleH1
};

View File

@ -1,43 +1,45 @@
"use strict";
const BAttribute = require('../../becca/entities/battribute');
const utils = require('../../services/utils');
const log = require('../../services/log');
const noteService = require('../../services/notes');
const attributeService = require('../../services/attributes');
const BBranch = require('../../becca/entities/bbranch');
const path = require('path');
const protectedSessionService = require('../protected_session');
const mimeService = require('./mime.js');
const treeService = require('../tree');
const yauzl = require("yauzl");
const htmlSanitizer = require('../html_sanitizer');
const becca = require('../../becca/becca');
const BAttachment = require('../../becca/entities/battachment');
const markdownService = require('./markdown.js');
import BAttribute = require('../../becca/entities/battribute');
import utils = require('../../services/utils');
import log = require('../../services/log');
import noteService = require('../../services/notes');
import attributeService = require('../../services/attributes');
import BBranch = require('../../becca/entities/bbranch');
import path = require('path');
import protectedSessionService = require('../protected_session');
import mimeService = require('./mime');
import treeService = require('../tree');
import yauzl = require("yauzl");
import htmlSanitizer = require('../html_sanitizer');
import becca = require('../../becca/becca');
import BAttachment = require('../../becca/entities/battachment');
import markdownService = require('./markdown');
import TaskContext = require('../task_context');
import BNote = require('../../becca/entities/bnote');
import NoteMeta = require('../meta/note_meta');
import AttributeMeta = require('../meta/attribute_meta');
import { Stream } from 'stream';
import { NoteType } from '../../becca/entities/rows';
/**
* @param {TaskContext} taskContext
* @param {Buffer} fileBuffer
* @param {BNote} importRootNote
* @returns {Promise<BNote>}
*/
async function importZip(taskContext, fileBuffer, importRootNote) {
/** @type {Object.<string, string>} maps from original noteId (in ZIP file) to newly generated noteId */
const noteIdMap = {};
/** @type {Object.<string, string>} maps from original attachmentId (in ZIP file) to newly generated attachmentId */
const attachmentIdMap = {};
const attributes = [];
/**
 * Parsed contents of the metadata file bundled in a Trilium ZIP export.
 * Describes the note tree so IDs, attributes and structure can be recreated.
 */
interface MetaFile {
    /** Root-level note metadata entries (children nest recursively via NoteMeta). */
    files: NoteMeta[]
}
async function importZip(taskContext: TaskContext, fileBuffer: Buffer, importRootNote: BNote): Promise<BNote> {
/** maps from original noteId (in ZIP file) to newly generated noteId */
const noteIdMap: Record<string, string> = {};
/** maps from original attachmentId (in ZIP file) to newly generated attachmentId */
const attachmentIdMap: Record<string, string> = {};
const attributes: AttributeMeta[] = [];
// path => noteId, used only when meta file is not available
/** @type {Object.<string, string>} path => noteId | attachmentId */
const createdPaths = { '/': importRootNote.noteId, '\\': importRootNote.noteId };
let metaFile = null;
/** @type {BNote} */
let firstNote = null;
/** @type {Set.<string>} */
const createdNoteIds = new Set();
/** path => noteId | attachmentId */
const createdPaths: Record<string, string> = { '/': importRootNote.noteId, '\\': importRootNote.noteId };
let metaFile: MetaFile | null = null;
let firstNote: BNote | null = null;
const createdNoteIds = new Set<string>();
function getNewNoteId(origNoteId) {
function getNewNoteId(origNoteId: string) {
if (!origNoteId.trim()) {
// this probably shouldn't happen, but still good to have this precaution
return "empty_note_id";
@ -55,7 +57,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return noteIdMap[origNoteId];
}
function getNewAttachmentId(origAttachmentId) {
function getNewAttachmentId(origAttachmentId: string) {
if (!origAttachmentId.trim()) {
// this probably shouldn't happen, but still good to have this precaution
return "empty_attachment_id";
@ -68,12 +70,8 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return attachmentIdMap[origAttachmentId];
}
/**
* @param {NoteMeta} parentNoteMeta
* @param {string} dataFileName
*/
function getAttachmentMeta(parentNoteMeta, dataFileName) {
for (const noteMeta of parentNoteMeta.children) {
function getAttachmentMeta(parentNoteMeta: NoteMeta, dataFileName: string) {
for (const noteMeta of parentNoteMeta.children || []) {
for (const attachmentMeta of noteMeta.attachments || []) {
if (attachmentMeta.dataFileName === dataFileName) {
return {
@ -88,22 +86,20 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return {};
}
/** @returns {{noteMeta: NoteMeta|undefined, parentNoteMeta: NoteMeta|undefined, attachmentMeta: AttachmentMeta|undefined}} */
function getMeta(filePath) {
function getMeta(filePath: string) {
if (!metaFile) {
return {};
}
const pathSegments = filePath.split(/[\/\\]/g);
/** @type {NoteMeta} */
let cursor = {
let cursor: NoteMeta | undefined = {
isImportRoot: true,
children: metaFile.files
children: metaFile.files,
dataFileName: ""
};
/** @type {NoteMeta} */
let parent;
let parent: NoteMeta | undefined = undefined;
for (const segment of pathSegments) {
if (!cursor?.children?.length) {
@ -111,7 +107,9 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
}
parent = cursor;
cursor = parent.children.find(file => file.dataFileName === segment || file.dirFileName === segment);
if (parent.children) {
cursor = parent.children.find(file => file.dataFileName === segment || file.dirFileName === segment);
}
if (!cursor) {
return getAttachmentMeta(parent, segment);
@ -120,19 +118,15 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return {
parentNoteMeta: parent,
noteMeta: cursor
noteMeta: cursor,
attachmentMeta: null
};
}
/**
* @param {string} filePath
* @param {NoteMeta} parentNoteMeta
* @return {string}
*/
function getParentNoteId(filePath, parentNoteMeta) {
function getParentNoteId(filePath: string, parentNoteMeta?: NoteMeta) {
let parentNoteId;
if (parentNoteMeta) {
if (parentNoteMeta?.noteId) {
parentNoteId = parentNoteMeta.isImportRoot ? importRootNote.noteId : getNewNoteId(parentNoteMeta.noteId);
}
else {
@ -151,13 +145,8 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return parentNoteId;
}
/**
* @param {NoteMeta} noteMeta
* @param {string} filePath
* @return {string}
*/
function getNoteId(noteMeta, filePath) {
if (noteMeta) {
function getNoteId(noteMeta: NoteMeta | undefined, filePath: string): string {
if (noteMeta?.noteId) {
return getNewNoteId(noteMeta.noteId);
}
@ -176,23 +165,19 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return noteId;
}
function detectFileTypeAndMime(taskContext, filePath) {
function detectFileTypeAndMime(taskContext: TaskContext, filePath: string) {
const mime = mimeService.getMime(filePath) || "application/octet-stream";
const type = mimeService.getType(taskContext.data, mime);
const type = mimeService.getType(taskContext.data || {}, mime);
return { mime, type };
}
/**
* @param {BNote} note
* @param {NoteMeta} noteMeta
*/
function saveAttributes(note, noteMeta) {
function saveAttributes(note: BNote, noteMeta: NoteMeta | undefined) {
if (!noteMeta) {
return;
}
for (const attr of noteMeta.attributes) {
for (const attr of noteMeta.attributes || []) {
attr.noteId = note.noteId;
if (attr.type === 'label-definition') {
@ -218,11 +203,11 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
attr.value = getNewNoteId(attr.value);
}
if (taskContext.data.safeImport && attributeService.isAttributeDangerous(attr.type, attr.name)) {
if (taskContext.data?.safeImport && attributeService.isAttributeDangerous(attr.type, attr.name)) {
attr.name = `disabled:${attr.name}`;
}
if (taskContext.data.safeImport) {
if (taskContext.data?.safeImport) {
attr.name = htmlSanitizer.sanitize(attr.name);
attr.value = htmlSanitizer.sanitize(attr.value);
}
@ -231,7 +216,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
}
}
function saveDirectory(filePath) {
function saveDirectory(filePath: string) {
const { parentNoteMeta, noteMeta } = getMeta(filePath);
const noteId = getNoteId(noteMeta, filePath);
@ -240,12 +225,16 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return;
}
const noteTitle = utils.getNoteTitle(filePath, taskContext.data.replaceUnderscoresWithSpaces, noteMeta);
const noteTitle = utils.getNoteTitle(filePath, !!taskContext.data?.replaceUnderscoresWithSpaces, noteMeta);
const parentNoteId = getParentNoteId(filePath, parentNoteMeta);
if (!parentNoteId) {
throw new Error("Missing parent note ID.");
}
const {note} = noteService.createNewNote({
parentNoteId: parentNoteId,
title: noteTitle,
title: noteTitle || "",
content: '',
noteId: noteId,
type: resolveNoteType(noteMeta?.type),
@ -265,8 +254,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return noteId;
}
/** @returns {{attachmentId: string}|{noteId: string}} */
function getEntityIdFromRelativeUrl(url, filePath) {
function getEntityIdFromRelativeUrl(url: string, filePath: string) {
while (url.startsWith("./")) {
url = url.substr(2);
}
@ -287,7 +275,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
const { noteMeta, attachmentMeta } = getMeta(absUrl);
if (attachmentMeta) {
if (attachmentMeta && attachmentMeta.attachmentId && noteMeta.noteId) {
return {
attachmentId: getNewAttachmentId(attachmentMeta.attachmentId),
noteId: getNewNoteId(noteMeta.noteId)
@ -299,15 +287,8 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
}
}
/**
* @param {string} content
* @param {string} noteTitle
* @param {string} filePath
* @param {NoteMeta} noteMeta
* @return {string}
*/
function processTextNoteContent(content, noteTitle, filePath, noteMeta) {
function isUrlAbsolute(url) {
function processTextNoteContent(content: string, noteTitle: string, filePath: string, noteMeta?: NoteMeta) {
function isUrlAbsolute(url: string) {
return /^(?:[a-z]+:)?\/\//i.test(url);
}
@ -321,7 +302,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
}
});
if (taskContext.data.safeImport) {
if (taskContext.data?.safeImport) {
content = htmlSanitizer.sanitize(content);
}
@ -336,7 +317,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
try {
url = decodeURIComponent(url).trim();
} catch (e) {
} catch (e: any) {
log.error(`Cannot parse image URL '${url}', keeping original. Error: ${e.message}.`);
return `src="${url}"`;
}
@ -359,7 +340,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
content = content.replace(/href="([^"]*)"/g, (match, url) => {
try {
url = decodeURIComponent(url).trim();
} catch (e) {
} catch (e: any) {
log.error(`Cannot parse link URL '${url}', keeping original. Error: ${e.message}.`);
return `href="${url}"`;
}
@ -395,7 +376,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return content;
}
function removeTriliumTags(content) {
function removeTriliumTags(content: string) {
const tagsToRemove = [
'<h1 data-trilium-h1>([^<]*)<\/h1>',
'<title data-trilium-title>([^<]*)<\/title>'
@ -407,26 +388,18 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return content;
}
/**
* @param {NoteMeta} noteMeta
* @param {string} type
* @param {string} mime
* @param {string|Buffer} content
* @param {string} noteTitle
* @param {string} filePath
* @return {string}
*/
function processNoteContent(noteMeta, type, mime, content, noteTitle, filePath) {
if (noteMeta?.format === 'markdown'
|| (!noteMeta && taskContext.data.textImportedAsText && ['text/markdown', 'text/x-markdown'].includes(mime))) {
function processNoteContent(noteMeta: NoteMeta | undefined, type: string, mime: string, content: string | Buffer, noteTitle: string, filePath: string) {
if ((noteMeta?.format === 'markdown'
|| (!noteMeta && taskContext.data?.textImportedAsText && ['text/markdown', 'text/x-markdown'].includes(mime)))
&& typeof content === "string") {
content = markdownService.renderToHtml(content, noteTitle);
}
if (type === 'text') {
if (type === 'text' && typeof content === "string") {
content = processTextNoteContent(content, noteTitle, filePath, noteMeta);
}
if (type === 'relationMap' && noteMeta) {
if (type === 'relationMap' && noteMeta && typeof content === "string") {
const relationMapLinks = (noteMeta.attributes || [])
.filter(attr => attr.type === 'relation' && attr.name === 'relationMapLink');
@ -440,11 +413,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return content;
}
/**
* @param {string} filePath
* @param {Buffer} content
*/
function saveNote(filePath, content) {
function saveNote(filePath: string, content: string | Buffer) {
const { parentNoteMeta, noteMeta, attachmentMeta } = getMeta(filePath);
if (noteMeta?.noImport) {
@ -453,7 +422,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
const noteId = getNoteId(noteMeta, filePath);
if (attachmentMeta) {
if (attachmentMeta && attachmentMeta.attachmentId) {
const attachment = new BAttachment({
attachmentId: getNewAttachmentId(attachmentMeta.attachmentId),
ownerId: noteId,
@ -487,16 +456,20 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
return;
}
let { type, mime } = noteMeta ? noteMeta : detectFileTypeAndMime(taskContext, filePath);
type = resolveNoteType(type);
let { mime } = noteMeta ? noteMeta : detectFileTypeAndMime(taskContext, filePath);
if (!mime) {
throw new Error("Unable to resolve mime type.");
}
let type = resolveNoteType(noteMeta?.type);
if (type !== 'file' && type !== 'image') {
content = content.toString("utf-8");
}
const noteTitle = utils.getNoteTitle(filePath, taskContext.data.replaceUnderscoresWithSpaces, noteMeta);
const noteTitle = utils.getNoteTitle(filePath, taskContext.data?.replaceUnderscoresWithSpaces || false, noteMeta);
content = processNoteContent(noteMeta, type, mime, content, noteTitle, filePath);
content = processNoteContent(noteMeta, type, mime, content, noteTitle || "", filePath);
let note = becca.getNote(noteId);
@ -508,7 +481,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
if (note.type === undefined) {
note.type = type;
note.mime = mime;
note.title = noteTitle;
note.title = noteTitle || "";
note.isProtected = isProtected;
note.save();
}
@ -519,16 +492,20 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
new BBranch({
noteId,
parentNoteId,
isExpanded: noteMeta.isExpanded,
prefix: noteMeta.prefix,
notePosition: noteMeta.notePosition
isExpanded: noteMeta?.isExpanded,
prefix: noteMeta?.prefix,
notePosition: noteMeta?.notePosition
}).save();
}
}
else {
if (typeof content !== "string") {
throw new Error("Incorrect content type.");
}
({note} = noteService.createNewNote({
parentNoteId: parentNoteId,
title: noteTitle,
title: noteTitle || "",
content: content,
noteId,
type,
@ -560,7 +537,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
// we're running two passes to make sure that the meta file is loaded before the rest of the files is processed.
await readZipFile(fileBuffer, async (zipfile, entry) => {
await readZipFile(fileBuffer, async (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => {
const filePath = normalizeFilePath(entry.fileName);
if (filePath === '!!!meta.json') {
@ -572,7 +549,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
zipfile.readEntry();
});
await readZipFile(fileBuffer, async (zipfile, entry) => {
await readZipFile(fileBuffer, async (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => {
const filePath = normalizeFilePath(entry.fileName);
if (/\/$/.test(entry.fileName)) {
@ -590,6 +567,7 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
for (const noteId of createdNoteIds) {
const note = becca.getNote(noteId);
if (!note) continue;
await noteService.asyncPostProcessContent(note, note.getContent());
if (!metaFile) {
@ -612,11 +590,15 @@ async function importZip(taskContext, fileBuffer, importRootNote) {
}
}
if (!firstNote) {
throw new Error("Unable to determine first note.");
}
return firstNote;
}
/** @returns {string} path without leading or trailing slash and backslashes converted to forward ones */
function normalizeFilePath(filePath) {
/** @returns path without leading or trailing slash and backslashes converted to forward ones */
function normalizeFilePath(filePath: string): string {
filePath = filePath.replace(/\\/g, "/");
if (filePath.startsWith("/")) {
@ -630,29 +612,30 @@ function normalizeFilePath(filePath) {
return filePath;
}
/** @returns {Promise<Buffer>} */
function streamToBuffer(stream) {
const chunks = [];
function streamToBuffer(stream: Stream): Promise<Buffer> {
const chunks: Uint8Array[] = [];
stream.on('data', chunk => chunks.push(chunk));
return new Promise((res, rej) => stream.on('end', () => res(Buffer.concat(chunks))));
}
/** @returns {Promise<Buffer>} */
function readContent(zipfile, entry) {
function readContent(zipfile: yauzl.ZipFile, entry: yauzl.Entry): Promise<Buffer> {
return new Promise((res, rej) => {
zipfile.openReadStream(entry, function(err, readStream) {
if (err) rej(err);
if (!readStream) throw new Error("Unable to read content.");
streamToBuffer(readStream).then(res);
});
});
}
function readZipFile(buffer, processEntryCallback) {
function readZipFile(buffer: Buffer, processEntryCallback: (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => void) {
return new Promise((res, rej) => {
yauzl.fromBuffer(buffer, {lazyEntries: true, validateEntrySizes: false}, function(err, zipfile) {
if (err) throw err;
if (!zipfile) throw new Error("Unable to read zip file.");
zipfile.readEntry();
zipfile.on("entry", entry => processEntryCallback(zipfile, entry));
zipfile.on("end", res);
@ -660,20 +643,19 @@ function readZipFile(buffer, processEntryCallback) {
});
}
function resolveNoteType(type) {
function resolveNoteType(type: string | undefined): NoteType {
// BC for ZIPs created in Triliun 0.57 and older
if (type === 'relation-map') {
type = 'relationMap';
return 'relationMap';
} else if (type === 'note-map') {
type = 'noteMap';
return 'noteMap';
} else if (type === 'web-view') {
type = 'webView';
return 'webView';
}
return type || "text";
return "text";
}
module.exports = {
export = {
importZip
};

View File

@ -1,9 +1,9 @@
interface AttachmentMeta {
attachmentId: string;
attachmentId?: string;
title: string;
role: string;
mime: string;
position: number;
position?: number;
dataFileName: string;
}

View File

@ -1,9 +1,12 @@
import { AttributeType } from "../../becca/entities/rows";
interface AttributeMeta {
type: string;
noteId?: string;
type: AttributeType;
name: string;
value: string;
isInheritable: boolean;
position: number;
isInheritable?: boolean;
position?: number;
}
export = AttributeMeta;

View File

@ -1,23 +1,25 @@
import AttachmentMeta = require("./attachment_meta");
import AttributeMeta = require("./attribute_meta");
interface NoteMeta {
noteId: string;
notePath: string;
isClone: boolean;
title: string;
notePosition: number;
prefix: string;
isExpanded: boolean;
type: string;
mime: string;
noteId?: string;
notePath?: string[];
isClone?: boolean;
title?: string;
notePosition?: number;
prefix?: string | null;
isExpanded?: boolean;
type?: string;
mime?: string;
/** 'html' or 'markdown', applicable to text notes only */
format: "html" | "markdown";
format?: "html" | "markdown";
dataFileName: string;
dirFileName: string;
dirFileName?: string;
/** this file should not be imported (e.g., HTML navigation) */
noImport: boolean;
attributes: AttributeMeta[];
attachments: AttributeMeta[];
noImport?: boolean;
isImportRoot?: boolean;
attributes?: AttributeMeta[];
attachments?: AttachmentMeta[];
children?: NoteMeta[];
}

View File

@ -0,0 +1,25 @@
import { NoteType } from "../becca/entities/rows";
export interface NoteParams {
/** optionally can force specific noteId */
noteId?: string;
parentNoteId: string;
templateNoteId?: string;
title: string;
content: string;
type: NoteType;
/** default value is derived from default mimes for type */
mime?: string;
/** default is false */
isProtected?: boolean;
/** default is false */
isExpanded?: boolean;
/** default is empty string */
prefix?: string;
/** default is the last existing notePosition in a parent + 10 */
notePosition?: number;
dateCreated?: string;
utcDateCreated?: string;
ignoreForbiddenParents?: boolean;
target?: "into";
}

View File

@ -155,6 +155,7 @@ function getAndValidateParent(params: GetValidateParams) {
interface NoteParams {
/** optionally can force specific noteId */
noteId?: string;
branchId?: string;
parentNoteId: string;
templateNoteId?: string;
title: string;
@ -167,7 +168,7 @@ interface NoteParams {
/** default is false */
isExpanded?: boolean;
/** default is empty string */
prefix?: string;
prefix?: string | null;
/** default is the last existing notePosition in a parent + 10 */
notePosition?: number;
dateCreated?: string;
@ -506,7 +507,7 @@ async function downloadImage(noteId: string, imageUrl: string) {
const parsedUrl = url.parse(unescapedUrl);
const title = path.basename(parsedUrl.pathname || "");
const imageService = require('../services/image.js');
const imageService = require('../services/image');
const attachment = imageService.saveImageToAttachment(noteId, imageBuffer, title, true, true);
imageUrlToAttachmentIdMapping[imageUrl] = attachment.attachmentId;
@ -539,7 +540,7 @@ function downloadImages(noteId: string, content: string) {
const imageBase64 = url.substr(inlineImageMatch[0].length);
const imageBuffer = Buffer.from(imageBase64, 'base64');
const imageService = require('../services/image.js');
const imageService = require('../services/image');
const attachment = imageService.saveImageToAttachment(noteId, imageBuffer, "inline image", true, true);
const encodedTitle = encodeURIComponent(attachment.title);
@ -657,7 +658,7 @@ function saveAttachments(note: BNote, content: string) {
return content;
}
function saveLinks(note: BNote, content: string) {
function saveLinks(note: BNote, content: string | Buffer) {
if ((note.type !== 'text' && note.type !== 'relationMap')
|| (note.isProtected && !protectedSessionService.isProtectedSessionAvailable())) {
return {
@ -669,7 +670,7 @@ function saveLinks(note: BNote, content: string) {
const foundLinks: FoundLink[] = [];
let forceFrontendReload = false;
if (note.type === 'text') {
if (note.type === 'text' && typeof content === "string") {
content = downloadImages(note.noteId, content);
content = saveAttachments(note, content);
@ -679,7 +680,7 @@ function saveLinks(note: BNote, content: string) {
({forceFrontendReload, content} = checkImageAttachments(note, content));
}
else if (note.type === 'relationMap') {
else if (note.type === 'relationMap' && typeof content === "string") {
findRelationMapLinks(content, foundLinks);
}
else {
@ -874,7 +875,7 @@ function getUndeletedParentBranchIds(noteId: string, deleteId: string) {
AND parentNote.isDeleted = 0`, [noteId, deleteId]);
}
function scanForLinks(note: BNote, content: string) {
function scanForLinks(note: BNote, content: string | Buffer) {
if (!note || !['text', 'relationMap'].includes(note.type)) {
return;
}
@ -896,7 +897,7 @@ function scanForLinks(note: BNote, content: string) {
/**
* Things which have to be executed after updating content, but asynchronously (separate transaction)
*/
async function asyncPostProcessContent(note: BNote, content: string) {
async function asyncPostProcessContent(note: BNote, content: string | Buffer) {
if (cls.isMigrationRunning()) {
// this is rarely needed for migrations, but can cause trouble by e.g. triggering downloads
return;

View File

@ -15,7 +15,7 @@ function getOptionOrNull(name: string): string | null {
return option ? option.value : null;
}
function getOption(name: string): string {
function getOption(name: string) {
const val = getOptionOrNull(name);
if (val === null) {
@ -44,15 +44,15 @@ function getOptionInt(name: string, defaultValue?: number): number {
function getOptionBool(name: string): boolean {
const val = getOption(name);
if (!['true', 'false'].includes(val)) {
if (typeof val !== "string" || !['true', 'false'].includes(val)) {
throw new Error(`Could not parse '${val}' into boolean for option '${name}'`);
}
return val === 'true';
}
function setOption(name: string, value: string | boolean) {
if (value === true || value === false) {
function setOption(name: string, value: string | number | boolean) {
if (value === true || value === false || typeof value === "number") {
value = value.toString();
}
@ -68,7 +68,7 @@ function setOption(name: string, value: string | boolean) {
}
}
function createOption(name: string, value: string, isSynced: boolean) {
function createOption(name: string, value: string | number, isSynced: boolean) {
// to avoid circular dependency, need to find a better solution
const BOption = require('../becca/entities/boption');
@ -84,7 +84,7 @@ function getOptions() {
}
function getOptionMap() {
const map: Record<string, string> = {};
const map: Record<string | number, string> = {};
for (const option of Object.values(becca.options)) {
map[option.name] = option.value;

View File

@ -33,7 +33,7 @@ interface Client {
request(opts: ClientOpts): Request;
}
function exec(opts: ExecOpts) {
function exec<T>(opts: ExecOpts): Promise<T> {
const client = getClient(opts);
// hack for cases where electron.net does not work, but we don't want to set proxy
@ -129,7 +129,7 @@ function exec(opts: ExecOpts) {
: opts.body;
}
request.end(payload);
request.end(payload as string);
}
catch (e: any) {
reject(generateError(opts, e.message));

View File

@ -3,7 +3,7 @@ export interface CookieJar {
}
export interface ExecOpts {
proxy: "noproxy" | null;
proxy: string | null;
method: string;
url: string;
paging?: {
@ -16,5 +16,5 @@ export interface ExecOpts {
password?: string;
},
timeout: number;
body: string;
body?: string | {};
}

View File

@ -1,15 +1,16 @@
const syncService = require('./sync');
const log = require('./log');
const sqlInit = require('./sql_init');
const optionService = require('./options');
const syncOptions = require('./sync_options');
const request = require('./request');
const appInfo = require('./app_info');
const utils = require('./utils');
const becca = require('../becca/becca');
import syncService = require('./sync');
import log = require('./log');
import sqlInit = require('./sql_init');
import optionService = require('./options');
import syncOptions = require('./sync_options');
import request = require('./request');
import appInfo = require('./app_info');
import utils = require('./utils');
import becca = require('../becca/becca');
import { SetupStatusResponse, SetupSyncSeedResponse } from './api-interface';
async function hasSyncServerSchemaAndSeed() {
const response = await requestToSyncServer('GET', '/api/setup/status');
const response = await requestToSyncServer<SetupStatusResponse>('GET', '/api/setup/status');
if (response.syncVersion !== appInfo.syncVersion) {
throw new Error(`Could not setup sync since local sync protocol version is ${appInfo.syncVersion} while remote is ${response.syncVersion}. To fix this issue, use same Trilium version on all instances.`);
@ -32,7 +33,7 @@ function triggerSync() {
async function sendSeedToSyncServer() {
log.info("Initiating sync to server");
await requestToSyncServer('POST', '/api/setup/sync-seed', {
await requestToSyncServer<void>('POST', '/api/setup/sync-seed', {
options: getSyncSeedOptions(),
syncVersion: appInfo.syncVersion
});
@ -43,7 +44,7 @@ async function sendSeedToSyncServer() {
optionService.setOption('lastSyncedPull', 0);
}
async function requestToSyncServer(method, path, body = null) {
async function requestToSyncServer<T>(method: string, path: string, body?: string | {}): Promise<T> {
const timeout = syncOptions.getSyncTimeout();
return await utils.timeLimit(request.exec({
@ -52,10 +53,10 @@ async function requestToSyncServer(method, path, body = null) {
body,
proxy: syncOptions.getSyncProxy(),
timeout: timeout
}), timeout);
}), timeout) as T;
}
async function setupSyncFromSyncServer(syncServerHost, syncProxy, password) {
async function setupSyncFromSyncServer(syncServerHost: string, syncProxy: string, password: string) {
if (sqlInit.isDbInitialized()) {
return {
result: 'failure',
@ -67,7 +68,7 @@ async function setupSyncFromSyncServer(syncServerHost, syncProxy, password) {
log.info("Getting document options FROM sync server.");
// the response is expected to contain documentId and documentSecret options
const resp = await request.exec({
const resp = await request.exec<SetupSyncSeedResponse>({
method: 'get',
url: `${syncServerHost}/api/setup/sync-seed`,
auth: { password },
@ -92,7 +93,7 @@ async function setupSyncFromSyncServer(syncServerHost, syncProxy, password) {
return { result: 'success' };
}
catch (e) {
catch (e: any) {
log.error(`Sync failed: '${e.message}', stack: ${e.stack}`);
return {

View File

@ -1,9 +1,19 @@
type Updater = () => void;
class SpacedUpdate {
constructor(updater, updateInterval = 1000) {
private updater: Updater;
private lastUpdated: number;
private changed: boolean;
private updateInterval: number;
private changeForbidden: boolean;
constructor(updater: Updater, updateInterval = 1000) {
this.updater = updater;
this.lastUpdated = Date.now();
this.changed = false;
this.updateInterval = updateInterval;
this.changeForbidden = false;
}
scheduleUpdate() {
@ -52,7 +62,7 @@ class SpacedUpdate {
}
}
async allowUpdateWithoutChange(callback) {
async allowUpdateWithoutChange(callback: () => void) {
this.changeForbidden = true;
try {
@ -64,4 +74,4 @@ class SpacedUpdate {
}
}
module.exports = SpacedUpdate;
export = SpacedUpdate;

View File

@ -1,16 +1,20 @@
const attributeService = require('./attributes');
const dateNoteService = require('./date_notes');
const becca = require('../becca/becca');
const noteService = require('./notes');
const dateUtils = require('./date_utils');
const log = require('./log');
const hoistedNoteService = require('./hoisted_note');
const searchService = require('./search/services/search');
const SearchContext = require('./search/search_context');
const {LBTPL_NOTE_LAUNCHER, LBTPL_CUSTOM_WIDGET, LBTPL_SPACER, LBTPL_SCRIPT} = require('./hidden_subtree');
import attributeService = require('./attributes');
import dateNoteService = require('./date_notes');
import becca = require('../becca/becca');
import noteService = require('./notes');
import dateUtils = require('./date_utils');
import log = require('./log');
import hoistedNoteService = require('./hoisted_note');
import searchService = require('./search/services/search');
import SearchContext = require('./search/search_context');
import hiddenSubtree = require('./hidden_subtree');
const { LBTPL_NOTE_LAUNCHER, LBTPL_CUSTOM_WIDGET, LBTPL_SPACER, LBTPL_SCRIPT } = hiddenSubtree;
function getInboxNote(date) {
function getInboxNote(date: string) {
const workspaceNote = hoistedNoteService.getWorkspaceNote();
if (!workspaceNote) {
throw new Error("Unable to find workspace note");
}
let inbox;
@ -48,8 +52,9 @@ function createSqlConsole() {
return note;
}
function saveSqlConsole(sqlConsoleNoteId) {
function saveSqlConsole(sqlConsoleNoteId: string) {
const sqlConsoleNote = becca.getNote(sqlConsoleNoteId);
if (!sqlConsoleNote) throw new Error(`Unable to find SQL console note ID: ${sqlConsoleNoteId}`);
const today = dateUtils.localNowDate();
const sqlConsoleHome =
@ -59,7 +64,7 @@ function saveSqlConsole(sqlConsoleNoteId) {
const result = sqlConsoleNote.cloneTo(sqlConsoleHome.noteId);
for (const parentBranch of sqlConsoleNote.getParentBranches()) {
if (parentBranch.parentNote.hasAncestor('_hidden')) {
if (parentBranch.parentNote?.hasAncestor('_hidden')) {
parentBranch.markAsDeleted();
}
}
@ -67,7 +72,7 @@ function saveSqlConsole(sqlConsoleNoteId) {
return result;
}
function createSearchNote(searchString, ancestorNoteId) {
function createSearchNote(searchString: string, ancestorNoteId: string) {
const {note} = noteService.createNewNote({
parentNoteId: getMonthlyParentNoteId('_search', 'search'),
title: `Search: ${searchString}`,
@ -88,6 +93,9 @@ function createSearchNote(searchString, ancestorNoteId) {
function getSearchHome() {
const workspaceNote = hoistedNoteService.getWorkspaceNote();
if (!workspaceNote) {
throw new Error("Unable to find workspace note");
}
if (!workspaceNote.isRoot()) {
return workspaceNote.searchNoteInSubtree('#workspaceSearchHome')
@ -101,14 +109,18 @@ function getSearchHome() {
}
}
function saveSearchNote(searchNoteId) {
function saveSearchNote(searchNoteId: string) {
const searchNote = becca.getNote(searchNoteId);
if (!searchNote) {
throw new Error("Unable to find search note");
}
const searchHome = getSearchHome();
const result = searchNote.cloneTo(searchHome.noteId);
for (const parentBranch of searchNote.getParentBranches()) {
if (parentBranch.parentNote.hasAncestor('_hidden')) {
if (parentBranch.parentNote?.hasAncestor('_hidden')) {
parentBranch.markAsDeleted();
}
}
@ -116,7 +128,7 @@ function saveSearchNote(searchNoteId) {
return result;
}
function getMonthlyParentNoteId(rootNoteId, prefix) {
function getMonthlyParentNoteId(rootNoteId: string, prefix: string) {
const month = dateUtils.localNowDate().substring(0, 7);
const labelName = `${prefix}MonthNote`;
@ -138,7 +150,7 @@ function getMonthlyParentNoteId(rootNoteId, prefix) {
return monthNote.noteId;
}
function createScriptLauncher(parentNoteId, forceNoteId = null) {
function createScriptLauncher(parentNoteId: string, forceNoteId?: string) {
const note = noteService.createNewNote({
noteId: forceNoteId,
title: "Script Launcher",
@ -151,7 +163,13 @@ function createScriptLauncher(parentNoteId, forceNoteId = null) {
return note;
}
function createLauncher({parentNoteId, launcherType, noteId}) {
interface LauncherConfig {
parentNoteId: string;
launcherType: string;
noteId: string;
}
function createLauncher({ parentNoteId, launcherType, noteId }: LauncherConfig) {
let note;
if (launcherType === 'note') {
@ -197,10 +215,10 @@ function createLauncher({parentNoteId, launcherType, noteId}) {
};
}
function resetLauncher(noteId) {
function resetLauncher(noteId: string) {
const note = becca.getNote(noteId);
if (note.isLaunchBarConfig()) {
if (note?.isLaunchBarConfig()) {
if (note) {
if (noteId === '_lbRoot') {
// deleting hoisted notes are not allowed, so we just reset the children
@ -228,7 +246,13 @@ function resetLauncher(noteId) {
* Another use case was for script-packages (e.g. demo Task manager) which could this way register automatically/easily
* into the launchbar - for this it's recommended to use backend API's createOrUpdateLauncher()
*/
function createOrUpdateScriptLauncherFromApi(opts) {
function createOrUpdateScriptLauncherFromApi(opts: {
id: string;
title: string;
action: string;
icon?: string;
shortcut?: string;
}) {
if (opts.id && !/^[a-z0-9]+$/i.test(opts.id)) {
throw new Error(`Launcher ID can be alphanumeric only, '${opts.id}' given`);
}
@ -263,7 +287,7 @@ function createOrUpdateScriptLauncherFromApi(opts) {
return launcherNote;
}
module.exports = {
export = {
getInboxNote,
createSqlConsole,
saveSqlConsole,

View File

@ -96,7 +96,7 @@ async function createInitialDatabase() {
const dummyTaskContext = new TaskContext("no-progress-reporting", 'import', false);
const zipImportService = require('./import/zip.js');
const zipImportService = require('./import/zip');
await zipImportService.importZip(dummyTaskContext, demoFile, rootNote);
sql.transactional(() => {
@ -179,7 +179,7 @@ dbReady.then(() => {
});
function getDbSize() {
return sql.getValue("SELECT page_count * page_size / 1000 as size FROM pragma_page_count(), pragma_page_size()");
return sql.getValue<number>("SELECT page_count * page_size / 1000 as size FROM pragma_page_count(), pragma_page_size()");
}
log.info(`DB size: ${getDbSize()} KB`);

View File

@ -107,7 +107,7 @@ async function sync() {
}
async function login() {
const setupService = require('./setup.js'); // circular dependency issue
const setupService = require('./setup'); // circular dependency issue
if (!await setupService.hasSyncServerSchemaAndSeed()) {
await setupService.sendSeedToSyncServer();
@ -282,7 +282,7 @@ async function checkContentHash(syncContext: SyncContext) {
if (failedChecks.length > 0) {
// before re-queuing sectors, make sure the entity changes are correct
const consistencyChecks = require('./consistency_checks.js');
const consistencyChecks = require('./consistency_checks');
consistencyChecks.runEntityChangesChecks();
await syncRequest(syncContext, 'POST', `/api/sync/check-entity-changes`);

View File

@ -1,5 +1,6 @@
"use strict";
import { TaskData } from './task_context_interface';
import ws = require('./ws');
// taskId => TaskContext
@ -9,9 +10,9 @@ class TaskContext {
private taskId: string;
private taskType: string | null;
private data: {} | null;
private progressCount: number;
private lastSentCountTs: number;
data: TaskData | null;
noteDeletionHandlerTriggered: boolean;
constructor(taskId: string, taskType: string | null = null, data: {} | null = {}) {
@ -65,7 +66,7 @@ class TaskContext {
});
}
taskSucceeded(result: string) {
taskSucceeded(result?: string) {
ws.sendMessageToAllClients({
type: 'taskSucceeded',
taskId: this.taskId,

View File

@ -0,0 +1,7 @@
export interface TaskData {
safeImport?: boolean;
textImportedAsText?: boolean;
codeImportedAsCode?: boolean;
shrinkImages?: boolean;
replaceUnderscoresWithSpaces?: boolean;
}

View File

@ -226,8 +226,8 @@ function removeTextFileExtension(filePath: string) {
}
}
function getNoteTitle(filePath: string, replaceUnderscoresWithSpaces: boolean, noteMeta: { title: string }) {
if (noteMeta) {
function getNoteTitle(filePath: string, replaceUnderscoresWithSpaces: boolean, noteMeta?: { title?: string }) {
if (noteMeta?.title) {
return noteMeta.title;
} else {
const basename = path.basename(removeTextFileExtension(filePath));

View File

@ -12,6 +12,7 @@ import AbstractBeccaEntity = require('../becca/entities/abstract_becca_entity');
import env = require('./env');
import { IncomingMessage, Server } from 'http';
import { EntityChange } from './entity_changes_interface';
if (env.isDev()) {
const chokidar = require('chokidar');
const debounce = require('debounce');
@ -30,7 +31,8 @@ interface Message {
type: string;
data?: {
lastSyncedPush?: number | null,
entityChanges?: any[]
entityChanges?: any[],
shrinkImages?: boolean
} | null,
lastSyncedPush?: number | null,

13
src/types.d.ts vendored
View File

@ -16,4 +16,17 @@ declare module 'html2plaintext' {
declare module 'normalize-strings' {
function normalizeString(string: string): string;
export = normalizeString;
}
declare module 'joplin-turndown-plugin-gfm' {
import TurndownService = require("turndown");
namespace gfm {
function gfm(service: TurndownService): void;
}
export = gfm;
}
declare module 'is-animated' {
function isAnimated(buffer: Buffer): boolean;
export = isAnimated;
}

View File

@ -4,7 +4,7 @@ const assetPath = require('./src/services/asset_path');
module.exports = {
mode: 'production',
entry: {
setup: './src/public/app/setup.js',
setup: './src/public/app/setup.ts',
mobile: './src/public/app/mobile.js',
desktop: './src/public/app/desktop.js',
},