diff --git a/bin/create-anonymization-script.js b/bin/create-anonymization-script.js index b16a038de..4c71d6071 100755 --- a/bin/create-anonymization-script.js +++ b/bin/create-anonymization-script.js @@ -1,6 +1,6 @@ #!/usr/bin/env node -const anonymizationService = require('../src/services/anonymization.js'); +const anonymizationService = require('../src/services/anonymization'); const fs = require('fs'); const path = require('path'); diff --git a/bin/release.sh b/bin/release.sh index cdaa324d2..649a41437 100755 --- a/bin/release.sh +++ b/bin/release.sh @@ -26,9 +26,9 @@ jq '.version = "'$VERSION'"' package.json|sponge package.json git add package.json -echo 'module.exports = { buildDate:"'`date --iso-8601=seconds`'", buildRevision: "'`git log -1 --format="%H"`'" };' > src/services/build.js +echo 'export = { buildDate:"'`date --iso-8601=seconds`'", buildRevision: "'`git log -1 --format="%H"`'" };' > src/services/build.ts -git add src/services/build.js +git add src/services/build.ts TAG=v$VERSION diff --git a/db/migrations/0220__migrate_images_to_attachments.js b/db/migrations/0220__migrate_images_to_attachments.js index a9b2bfdbf..f88894820 100644 --- a/db/migrations/0220__migrate_images_to_attachments.js +++ b/db/migrations/0220__migrate_images_to_attachments.js @@ -1,5 +1,5 @@ module.exports = () => { - const beccaLoader = require('../../src/becca/becca_loader.js'); + const beccaLoader = require('../../src/becca/becca_loader'); const becca = require('../../src/becca/becca'); const cls = require('../../src/services/cls'); const log = require('../../src/services/log'); diff --git a/docker_healthcheck.js b/docker_healthcheck.js index 4c06a2627..88a8f3509 100755 --- a/docker_healthcheck.js +++ b/docker_healthcheck.js @@ -10,7 +10,7 @@ if (config.Network.https) { process.exit(0); } -const port = require('./src/services/port.js'); +const port = require('./src/services/port.ts'); const host = require('./src/services/host.js'); const options = { timeout: 2000 }; diff --git a/electron.js b/electron.js index 7cd04cfb2..13ba5acc5 100644 --- a/electron.js +++ b/electron.js @@ -1,7 +1,7 @@ 'use strict'; const {app, globalShortcut, BrowserWindow} = require('electron'); -const sqlInit = require('./src/services/sql_init.js'); +const sqlInit = require('./src/services/sql_init'); const appIconService = require('./src/services/app_icon.js'); const windowService = require('./src/services/window.js'); const tray = require('./src/services/tray.js'); diff --git a/package-lock.json b/package-lock.json index 283832065..654337658 100644 --- a/package-lock.json +++ b/package-lock.json @@ -93,8 +93,10 @@ "@types/escape-html": "^1.0.4", "@types/express": "^4.17.21", "@types/ini": "^4.1.0", + "@types/jsdom": "^21.1.6", "@types/mime-types": "^2.1.4", "@types/node": "^20.11.19", + "@types/sanitize-html": "^2.11.0", "@types/ws": "^8.5.10", "cross-env": "7.0.3", "electron": "25.9.8", @@ -1333,6 +1335,41 @@ "integrity": "sha512-mTehMtc+xtnWBBvqizcqYCktKDBH2WChvx1GU3Sfe4PysFDXiNe+1YwtpVX1MDtCa4NQrSPw2+3HmvXHY3gt1w==", "dev": true }, + "node_modules/@types/jsdom": { + "version": "21.1.6", + "resolved": "https://registry.npmjs.org/@types/jsdom/-/jsdom-21.1.6.tgz", + "integrity": "sha512-/7kkMsC+/kMs7gAYmmBR9P0vGTnOoLhQhyhQJSlXGI5bzTHp6xdo0TtKWQAsz6pmSAeVqKSbqeyP6hytqr9FDw==", + "dev": true, + "dependencies": { + "@types/node": "*", + "@types/tough-cookie": "*", + "parse5": "^7.0.0" + } + }, + "node_modules/@types/jsdom/node_modules/entities": { + "version": "4.5.0", + "resolved": 
"https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/@types/jsdom/node_modules/parse5": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", + "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "dev": true, + "dependencies": { + "entities": "^4.4.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, "node_modules/@types/json-schema": { "version": "7.0.9", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz", @@ -1439,6 +1476,89 @@ "@types/node": "*" } }, + "node_modules/@types/sanitize-html": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/@types/sanitize-html/-/sanitize-html-2.11.0.tgz", + "integrity": "sha512-7oxPGNQHXLHE48r/r/qjn7q0hlrs3kL7oZnGj0Wf/h9tj/6ibFyRkNbsDxaBBZ4XUZ0Dx5LGCyDJ04ytSofacQ==", + "dev": true, + "dependencies": { + "htmlparser2": "^8.0.0" + } + }, + "node_modules/@types/sanitize-html/node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dev": true, + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/@types/sanitize-html/node_modules/domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dev": true, + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/@types/sanitize-html/node_modules/domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "dev": true, + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/@types/sanitize-html/node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/@types/sanitize-html/node_modules/htmlparser2": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz", + "integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==", + "dev": true, + "funding": [ + "https://github.com/fb55/htmlparser2?sponsor=1", + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "dependencies": { + 
"domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1", + "entities": "^4.4.0" + } + }, "node_modules/@types/send": { "version": "0.17.4", "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", @@ -1460,6 +1580,12 @@ "@types/node": "*" } }, + "node_modules/@types/tough-cookie": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", + "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==", + "dev": true + }, "node_modules/@types/unist": { "version": "2.0.10", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz", @@ -14124,6 +14250,34 @@ "integrity": "sha512-mTehMtc+xtnWBBvqizcqYCktKDBH2WChvx1GU3Sfe4PysFDXiNe+1YwtpVX1MDtCa4NQrSPw2+3HmvXHY3gt1w==", "dev": true }, + "@types/jsdom": { + "version": "21.1.6", + "resolved": "https://registry.npmjs.org/@types/jsdom/-/jsdom-21.1.6.tgz", + "integrity": "sha512-/7kkMsC+/kMs7gAYmmBR9P0vGTnOoLhQhyhQJSlXGI5bzTHp6xdo0TtKWQAsz6pmSAeVqKSbqeyP6hytqr9FDw==", + "dev": true, + "requires": { + "@types/node": "*", + "@types/tough-cookie": "*", + "parse5": "^7.0.0" + }, + "dependencies": { + "entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true + }, + "parse5": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", + "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "dev": true, + "requires": { + "entities": "^4.4.0" + } + } + } + }, "@types/json-schema": { "version": "7.0.9", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz", @@ -14230,6 +14384,66 @@ "@types/node": "*" } }, + "@types/sanitize-html": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/@types/sanitize-html/-/sanitize-html-2.11.0.tgz", + "integrity": "sha512-7oxPGNQHXLHE48r/r/qjn7q0hlrs3kL7oZnGj0Wf/h9tj/6ibFyRkNbsDxaBBZ4XUZ0Dx5LGCyDJ04ytSofacQ==", + "dev": true, + "requires": { + "htmlparser2": "^8.0.0" + }, + "dependencies": { + "dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dev": true, + "requires": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + } + }, + "domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dev": true, + "requires": { + "domelementtype": "^2.3.0" + } + }, + "domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "dev": true, + "requires": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + } + }, + "entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true + }, + "htmlparser2": { + "version": 
"8.0.2", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz", + "integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==", + "dev": true, + "requires": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1", + "entities": "^4.4.0" + } + } + } + }, "@types/send": { "version": "0.17.4", "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", @@ -14251,6 +14465,12 @@ "@types/node": "*" } }, + "@types/tough-cookie": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", + "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==", + "dev": true + }, "@types/unist": { "version": "2.0.10", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz", diff --git a/package.json b/package.json index 5466a2e90..ff6ba08e2 100644 --- a/package.json +++ b/package.json @@ -114,8 +114,10 @@ "@types/escape-html": "^1.0.4", "@types/express": "^4.17.21", "@types/ini": "^4.1.0", + "@types/jsdom": "^21.1.6", "@types/mime-types": "^2.1.4", "@types/node": "^20.11.19", + "@types/sanitize-html": "^2.11.0", "@types/ws": "^8.5.10", "cross-env": "7.0.3", "electron": "25.9.8", diff --git a/src/anonymize.js b/src/anonymize.js index 5099b32b0..0fa00cda1 100644 --- a/src/anonymize.js +++ b/src/anonymize.js @@ -1,6 +1,6 @@ -const anonymizationService = require('./services/anonymization.js'); -const sqlInit = require('./services/sql_init.js'); -require('./becca/entity_constructor.js'); +const anonymizationService = require('./services/anonymization'); +const sqlInit = require('./services/sql_init'); +require('./becca/entity_constructor'); sqlInit.dbReady.then(async () => { try { diff --git a/src/app.js b/src/app.js index a2e7b4f31..c6afc27f8 100644 --- a/src/app.js +++ b/src/app.js @@ -8,7 +8,7 @@ const sessionParser = require('./routes/session_parser.js'); const utils = require('./services/utils'); require('./services/handlers.js'); -require('./becca/becca_loader.js'); +require('./becca/becca_loader'); const app = express(); @@ -46,7 +46,7 @@ require('./routes/error_handlers.js').register(app); require('./services/sync.js'); // triggers backup timer -require('./services/backup.js'); +require('./services/backup'); // trigger consistency checks timer require('./services/consistency_checks.js'); diff --git a/src/becca/becca-interface.ts b/src/becca/becca-interface.ts index 10495fc7a..5a454335c 100644 --- a/src/becca/becca-interface.ts +++ b/src/becca/becca-interface.ts @@ -21,7 +21,7 @@ interface AttachmentOpts { * Becca is a backend cache of all notes, branches, and attributes. * There's a similar frontend cache Froca, and share cache Shaca. */ -class Becca { +export default class Becca { loaded!: boolean; notes!: Record; @@ -280,4 +280,12 @@ class Becca { } } -export = Becca; \ No newline at end of file +/** + * This interface contains the data that is shared across all the objects of a given derived class of {@link AbstractBeccaEntity}. + * For example, all BAttributes will share their content, but all BBranches will have another set of this data. 
+ */ +export interface ConstructorData> { + primaryKeyName: string; + entityName: string; + hashedProperties: (keyof T)[]; +} \ No newline at end of file diff --git a/src/becca/becca.ts b/src/becca/becca.ts index 8ea1a6575..a66dc442d 100644 --- a/src/becca/becca.ts +++ b/src/becca/becca.ts @@ -1,6 +1,6 @@ "use strict"; -import Becca = require("./becca-interface"); +import Becca from "./becca-interface"; const becca = new Becca(); diff --git a/src/becca/becca_loader.js b/src/becca/becca_loader.ts similarity index 74% rename from src/becca/becca_loader.js rename to src/becca/becca_loader.ts index a3004d9b7..507828ad5 100644 --- a/src/becca/becca_loader.js +++ b/src/becca/becca_loader.ts @@ -1,19 +1,21 @@ "use strict"; -const sql = require('../services/sql'); -const eventService = require('../services/events'); -const becca = require('./becca'); -const sqlInit = require('../services/sql_init'); -const log = require('../services/log'); -const BNote = require('./entities/bnote'); -const BBranch = require('./entities/bbranch'); -const BAttribute = require('./entities/battribute'); -const BOption = require('./entities/boption'); -const BEtapiToken = require('./entities/betapi_token'); -const cls = require('../services/cls'); -const entityConstructor = require('../becca/entity_constructor'); +import sql = require('../services/sql'); +import eventService = require('../services/events'); +import becca = require('./becca'); +import sqlInit = require('../services/sql_init'); +import log = require('../services/log'); +import BNote = require('./entities/bnote'); +import BBranch = require('./entities/bbranch'); +import BAttribute = require('./entities/battribute'); +import BOption = require('./entities/boption'); +import BEtapiToken = require('./entities/betapi_token'); +import cls = require('../services/cls'); +import entityConstructor = require('../becca/entity_constructor'); +import { AttributeRow, BranchRow, EtapiTokenRow, NoteRow, OptionRow } from './entities/rows'; +import AbstractBeccaEntity = require('./entities/abstract_becca_entity'); -const beccaLoaded = new Promise((res, rej) => { +const beccaLoaded = new Promise((res, rej) => { sqlInit.dbReady.then(() => { cls.init(() => { load(); @@ -38,23 +40,23 @@ function load() { new BNote().update(row).init(); } - const branchRows = sql.getRawRows(`SELECT branchId, noteId, parentNoteId, prefix, notePosition, isExpanded, utcDateModified FROM branches WHERE isDeleted = 0`); + const branchRows = sql.getRawRows(`SELECT branchId, noteId, parentNoteId, prefix, notePosition, isExpanded, utcDateModified FROM branches WHERE isDeleted = 0`); // in-memory sort is faster than in the DB - branchRows.sort((a, b) => a.notePosition - b.notePosition); + branchRows.sort((a, b) => (a.notePosition || 0) - (b.notePosition || 0)); for (const row of branchRows) { new BBranch().update(row).init(); } - for (const row of sql.getRawRows(`SELECT attributeId, noteId, type, name, value, isInheritable, position, utcDateModified FROM attributes WHERE isDeleted = 0`)) { + for (const row of sql.getRawRows(`SELECT attributeId, noteId, type, name, value, isInheritable, position, utcDateModified FROM attributes WHERE isDeleted = 0`)) { new BAttribute().update(row).init(); } - for (const row of sql.getRows(`SELECT name, value, isSynced, utcDateModified FROM options`)) { + for (const row of sql.getRows(`SELECT name, value, isSynced, utcDateModified FROM options`)) { new BOption(row); } - for (const row of sql.getRows(`SELECT etapiTokenId, name, tokenHash, utcDateCreated, utcDateModified 
FROM etapi_tokens WHERE isDeleted = 0`)) { + for (const row of sql.getRows(`SELECT etapiTokenId, name, tokenHash, utcDateCreated, utcDateModified FROM etapi_tokens WHERE isDeleted = 0`)) { new BEtapiToken(row); } }); @@ -68,7 +70,7 @@ function load() { log.info(`Becca (note cache) load took ${Date.now() - start}ms`); } -function reload(reason) { +function reload(reason: string) { load(); require('../services/ws').reloadFrontend(reason || "becca reloaded"); @@ -88,7 +90,7 @@ eventService.subscribeBeccaLoader([eventService.ENTITY_CHANGE_SYNCED], ({ entity if (beccaEntity) { beccaEntity.updateFromRow(entityRow); } else { - beccaEntity = new EntityClass(); + beccaEntity = new EntityClass() as AbstractBeccaEntity>; beccaEntity.updateFromRow(entityRow); beccaEntity.init(); } @@ -112,7 +114,7 @@ eventService.subscribeBeccaLoader(eventService.ENTITY_CHANGED, ({ entityName, en * @param entityRow - can be a becca entity (change comes from this trilium instance) or just a row (from sync). * It should be therefore treated as a row. */ -function postProcessEntityUpdate(entityName, entityRow) { +function postProcessEntityUpdate(entityName: string, entityRow: any) { if (entityName === 'notes') { noteUpdated(entityRow); } else if (entityName === 'branches') { @@ -140,13 +142,13 @@ eventService.subscribeBeccaLoader([eventService.ENTITY_DELETED, eventService.ENT } }); -function noteDeleted(noteId) { +function noteDeleted(noteId: string) { delete becca.notes[noteId]; becca.dirtyNoteSetCache(); } -function branchDeleted(branchId) { +function branchDeleted(branchId: string) { const branch = becca.branches[branchId]; if (!branch) { @@ -173,23 +175,26 @@ function branchDeleted(branchId) { } delete becca.childParentToBranch[`${branch.noteId}-${branch.parentNoteId}`]; - delete becca.branches[branch.branchId]; -} - -function noteUpdated(entityRow) { - const note = becca.notes[entityRow.noteId]; - - if (note) { - // type / mime could have been changed, and they are present in flatTextCache - note.flatTextCache = null; + if (branch.branchId) { + delete becca.branches[branch.branchId]; } } -function branchUpdated(branchRow) { +function noteUpdated(entityRow: NoteRow) { + const note = becca.notes[entityRow.noteId]; + + if (note) { + // TODO, this wouldn't have worked in the original implementation since the variable was named __flatTextCache. 
+ // type / mime could have been changed, and they are present in flatTextCache + note.__flatTextCache = null; + } +} + +function branchUpdated(branchRow: BranchRow) { const childNote = becca.notes[branchRow.noteId]; if (childNote) { - childNote.flatTextCache = null; + childNote.__flatTextCache = null; childNote.sortParents(); // notes in the subtree can get new inherited attributes @@ -204,7 +209,7 @@ function branchUpdated(branchRow) { } } -function attributeDeleted(attributeId) { +function attributeDeleted(attributeId: string) { const attribute = becca.attributes[attributeId]; if (!attribute) { @@ -239,8 +244,7 @@ function attributeDeleted(attributeId) { } } -/** @param {BAttribute} attributeRow */ -function attributeUpdated(attributeRow) { +function attributeUpdated(attributeRow: BAttribute) { const attribute = becca.attributes[attributeRow.attributeId]; const note = becca.notes[attributeRow.noteId]; @@ -253,7 +257,7 @@ function attributeUpdated(attributeRow) { } } -function noteReorderingUpdated(branchIdList) { +function noteReorderingUpdated(branchIdList: number[]) { const parentNoteIds = new Set(); for (const branchId in branchIdList) { @@ -267,7 +271,7 @@ function noteReorderingUpdated(branchIdList) { } } -function etapiTokenDeleted(etapiTokenId) { +function etapiTokenDeleted(etapiTokenId: string) { delete becca.etapiTokens[etapiTokenId]; } @@ -275,14 +279,14 @@ eventService.subscribeBeccaLoader(eventService.ENTER_PROTECTED_SESSION, () => { try { becca.decryptProtectedNotes(); } - catch (e) { + catch (e: any) { log.error(`Could not decrypt protected notes: ${e.message} ${e.stack}`); } }); eventService.subscribeBeccaLoader(eventService.LEAVE_PROTECTED_SESSION, load); -module.exports = { +export = { load, reload, beccaLoaded diff --git a/src/becca/becca_service.js b/src/becca/becca_service.ts similarity index 82% rename from src/becca/becca_service.js rename to src/becca/becca_service.ts index 15a1c07cc..2a9eb2781 100644 --- a/src/becca/becca_service.js +++ b/src/becca/becca_service.ts @@ -1,10 +1,10 @@ "use strict"; -const becca = require('./becca'); -const cls = require('../services/cls'); -const log = require('../services/log'); +import becca = require('./becca'); +import cls = require('../services/cls'); +import log = require('../services/log'); -function isNotePathArchived(notePath) { +function isNotePathArchived(notePath: string[]) { const noteId = notePath[notePath.length - 1]; const note = becca.notes[noteId]; @@ -24,9 +24,9 @@ function isNotePathArchived(notePath) { return false; } -function getNoteTitle(childNoteId, parentNoteId) { +function getNoteTitle(childNoteId: string, parentNoteId?: string) { const childNote = becca.notes[childNoteId]; - const parentNote = becca.notes[parentNoteId]; + const parentNote = parentNoteId ? becca.notes[parentNoteId] : null; if (!childNote) { log.info(`Cannot find note '${childNoteId}'`); @@ -40,7 +40,7 @@ function getNoteTitle(childNoteId, parentNoteId) { return `${(branch && branch.prefix) ? 
`${branch.prefix} - ` : ''}${title}`; } -function getNoteTitleArrayForPath(notePathArray) { +function getNoteTitleArrayForPath(notePathArray: string[]) { if (!notePathArray || !Array.isArray(notePathArray)) { throw new Error(`${notePathArray} is not an array.`); } @@ -76,13 +76,13 @@ function getNoteTitleArrayForPath(notePathArray) { return titles; } -function getNoteTitleForPath(notePathArray) { +function getNoteTitleForPath(notePathArray: string[]) { const titles = getNoteTitleArrayForPath(notePathArray); return titles.join(' / '); } -module.exports = { +export = { getNoteTitle, getNoteTitleForPath, isNotePathArchived diff --git a/src/becca/entities/abstract_becca_entity.ts b/src/becca/entities/abstract_becca_entity.ts index 1c775b51e..ed9901b2f 100644 --- a/src/becca/entities/abstract_becca_entity.ts +++ b/src/becca/entities/abstract_becca_entity.ts @@ -9,25 +9,15 @@ import cls = require('../../services/cls'); import log = require('../../services/log'); import protectedSessionService = require('../../services/protected_session'); import blobService = require('../../services/blob'); -import Becca = require('../becca-interface'); +import Becca, { ConstructorData } from '../becca-interface'; -let becca: Becca | null = null; +let becca: Becca; interface ContentOpts { forceSave?: boolean; forceFrontendReload?: boolean; } -/** - * This interface contains the data that is shared across all the objects of a given derived class of {@link AbstractBeccaEntity}. - * For example, all BAttributes will share their content, but all BBranches will have another set of this data. - */ -interface ConstructorData> { - primaryKeyName: string; - entityName: string; - hashedProperties: (keyof T)[]; -} - /** * Base class for all backend entities. * @@ -35,10 +25,11 @@ interface ConstructorData> { */ abstract class AbstractBeccaEntity> { - protected utcDateCreated?: string; protected utcDateModified?: string; protected dateCreated?: string; protected dateModified?: string; + + utcDateCreated!: string; isProtected?: boolean; isSynced?: boolean; @@ -101,6 +92,12 @@ abstract class AbstractBeccaEntity> { abstract getPojo(): {}; + init() { + // Do nothing by default, can be overriden in derived classes. + } + + abstract updateFromRow(row: unknown): void; + get isDeleted(): boolean { // TODO: Not sure why some entities don't implement it. return false; @@ -109,13 +106,14 @@ abstract class AbstractBeccaEntity> { /** * Saves entity - executes SQL, but doesn't commit the transaction on its own */ - save(): this { + // TODO: opts not used but called a few times, maybe should be used by derived classes or passed to beforeSaving. 
+ save(opts?: {}): this { const constructorData = (this.constructor as unknown as ConstructorData); const entityName = constructorData.entityName; const primaryKeyName = constructorData.primaryKeyName; const isNewEntity = !(this as any)[primaryKeyName]; - + this.beforeSaving(); const pojo = this.getPojoToSave(); diff --git a/src/becca/entities/battachment.ts b/src/becca/entities/battachment.ts index 206c03286..7b203839a 100644 --- a/src/becca/entities/battachment.ts +++ b/src/becca/entities/battachment.ts @@ -37,16 +37,16 @@ class BAttachment extends AbstractBeccaEntity { noteId?: number; attachmentId?: string; /** either noteId or revisionId to which this attachment belongs */ - ownerId: string; - role: string; - mime: string; - title: string; + ownerId!: string; + role!: string; + mime!: string; + title!: string; type?: keyof typeof attachmentRoleToNoteTypeMapping; position?: number; blobId?: string; isProtected?: boolean; dateModified?: string; - utcDateScheduledForErasureSince?: string; + utcDateScheduledForErasureSince?: string | null; /** optionally added to the entity */ contentLength?: number; isDecrypted?: boolean; @@ -54,6 +54,11 @@ class BAttachment extends AbstractBeccaEntity { constructor(row: AttachmentRow) { super(); + this.updateFromRow(row); + this.decrypt(); + } + + updateFromRow(row: AttachmentRow): void { if (!row.ownerId?.trim()) { throw new Error("'ownerId' must be given to initialize a Attachment entity"); } else if (!row.role?.trim()) { @@ -76,8 +81,6 @@ class BAttachment extends AbstractBeccaEntity { this.utcDateModified = row.utcDateModified; this.utcDateScheduledForErasureSince = row.utcDateScheduledForErasureSince; this.contentLength = row.contentLength; - - this.decrypt(); } copy(): BAttachment { @@ -127,8 +130,8 @@ class BAttachment extends AbstractBeccaEntity { } } - getContent(): string | Buffer { - return this._getContent(); + getContent(): Buffer { + return this._getContent() as Buffer; } setContent(content: string | Buffer, opts: ContentOpts) { @@ -171,6 +174,11 @@ class BAttachment extends AbstractBeccaEntity { if (this.role === 'image' && parentNote.type === 'text') { const origContent = parentNote.getContent(); + + if (typeof origContent !== "string") { + throw new Error(`Note with ID '${note.noteId} has a text type but non-string content.`); + } + const oldAttachmentUrl = `api/attachments/${this.attachmentId}/image/`; const newNoteUrl = `api/images/${note.noteId}/`; diff --git a/src/becca/entities/battribute.ts b/src/becca/entities/battribute.ts index c3d0115fc..b29d3e237 100644 --- a/src/becca/entities/battribute.ts +++ b/src/becca/entities/battribute.ts @@ -28,7 +28,7 @@ class BAttribute extends AbstractBeccaEntity { value!: string; isInheritable!: boolean; - constructor(row: AttributeRow) { + constructor(row?: AttributeRow) { super(); if (!row) { @@ -52,7 +52,7 @@ class BAttribute extends AbstractBeccaEntity { ]); } - update([attributeId, noteId, type, name, value, isInheritable, position, utcDateModified]: any[]) { + update([attributeId, noteId, type, name, value, isInheritable, position, utcDateModified]: any) { this.attributeId = attributeId; this.noteId = noteId; this.type = type; diff --git a/src/becca/entities/bblob.ts b/src/becca/entities/bblob.ts index 149b9070a..40d1c5885 100644 --- a/src/becca/entities/bblob.ts +++ b/src/becca/entities/bblob.ts @@ -1,18 +1,24 @@ +import AbstractBeccaEntity = require("./abstract_becca_entity"); import { BlobRow } from "./rows"; // TODO: Why this does not extend the abstract becca? 
-class BBlob { +class BBlob extends AbstractBeccaEntity { static get entityName() { return "blobs"; } static get primaryKeyName() { return "blobId"; } static get hashedProperties() { return ["blobId", "content"]; } - blobId: string; - content: string | Buffer; - contentLength: number; - dateModified: string; - utcDateModified: string; + blobId!: string; + content!: string | Buffer; + contentLength!: number; + dateModified!: string; + utcDateModified!: string; constructor(row: BlobRow) { + super(); + this.updateFromRow(row); + } + + updateFromRow(row: BlobRow): void { this.blobId = row.blobId; this.content = row.content; this.contentLength = row.contentLength; diff --git a/src/becca/entities/bbranch.ts b/src/becca/entities/bbranch.ts index bf64d2ac6..6b45a7e7a 100644 --- a/src/becca/entities/bbranch.ts +++ b/src/becca/entities/bbranch.ts @@ -30,7 +30,7 @@ class BBranch extends AbstractBeccaEntity { isExpanded!: boolean; utcDateModified?: string; - constructor(row: BranchRow) { + constructor(row?: BranchRow) { super(); if (!row) { diff --git a/src/becca/entities/betapi_token.ts b/src/becca/entities/betapi_token.ts index d128c1beb..390b580bd 100644 --- a/src/becca/entities/betapi_token.ts +++ b/src/becca/entities/betapi_token.ts @@ -19,12 +19,12 @@ class BEtapiToken extends AbstractBeccaEntity { static get primaryKeyName() { return "etapiTokenId"; } static get hashedProperties() { return ["etapiTokenId", "name", "tokenHash", "utcDateCreated", "utcDateModified", "isDeleted"]; } - etapiTokenId!: string; + etapiTokenId?: string; name!: string; tokenHash!: string; - private _isDeleted!: boolean; + private _isDeleted?: boolean; - constructor(row: EtapiTokenRow) { + constructor(row?: EtapiTokenRow) { super(); if (!row) { @@ -36,7 +36,7 @@ class BEtapiToken extends AbstractBeccaEntity { } get isDeleted() { - return this._isDeleted; + return !!this._isDeleted; } updateFromRow(row: EtapiTokenRow) { @@ -74,7 +74,9 @@ class BEtapiToken extends AbstractBeccaEntity { super.beforeSaving(); - this.becca.etapiTokens[this.etapiTokenId] = this; + if (this.etapiTokenId) { + this.becca.etapiTokens[this.etapiTokenId] = this; + } } } diff --git a/src/becca/entities/bnote.ts b/src/becca/entities/bnote.ts index 821e10443..cc251ffd0 100644 --- a/src/becca/entities/bnote.ts +++ b/src/becca/entities/bnote.ts @@ -70,7 +70,7 @@ class BNote extends AbstractBeccaEntity { children!: BNote[]; targetRelations!: BAttribute[]; - private __flatTextCache!: string | null; + __flatTextCache!: string | null; private __attributeCache!: BAttribute[] | null; private __inheritableAttributeCache!: BAttribute[] | null; @@ -86,7 +86,7 @@ class BNote extends AbstractBeccaEntity { /** number of note revisions for this note */ private revisionCount!: number | null; - constructor(row: Partial) { + constructor(row?: Partial) { super(); if (!row) { @@ -216,9 +216,8 @@ class BNote extends AbstractBeccaEntity { * - changes in the note metadata or title should not trigger note content sync (so we keep separate utcDateModified and entity changes records) * - but to the user note content and title changes are one and the same - single dateModified (so all changes must go through Note and content is not a separate entity) */ - // TODO: original declaration was (string | Buffer), but everywhere it's used as a string. 
- getContent(): string { - return this._getContent() as string; + getContent() { + return this._getContent(); } /** @@ -226,7 +225,7 @@ class BNote extends AbstractBeccaEntity { getJsonContent(): {} | null { const content = this.getContent(); - if (!content || !content.trim()) { + if (typeof content !== "string" || !content || !content.trim()) { return null; } @@ -243,7 +242,7 @@ class BNote extends AbstractBeccaEntity { } } - setContent(content: string, opts: ContentOpts = {}) { + setContent(content: Buffer | string, opts: ContentOpts = {}) { this._setContent(content, opts); eventService.emit(eventService.NOTE_CONTENT_CHANGE, { entity: this }); @@ -661,7 +660,7 @@ class BNote extends AbstractBeccaEntity { * @param name - relation name to filter * @returns all note's relations (attributes with type relation), including inherited ones */ - getRelations(name: string): BAttribute[] { + getRelations(name?: string): BAttribute[] { return this.getAttributes(RELATION, name); } @@ -1510,6 +1509,10 @@ class BNote extends AbstractBeccaEntity { const oldNoteUrl = `api/images/${this.noteId}/`; const newAttachmentUrl = `api/attachments/${attachment.attachmentId}/image/`; + if (typeof parentContent !== "string") { + throw new Error("Unable to convert image note into attachment because parent note does not have a string content."); + } + const fixedContent = utils.replaceAll(parentContent, oldNoteUrl, newAttachmentUrl); parentNote.setContent(fixedContent); @@ -1611,7 +1614,7 @@ class BNote extends AbstractBeccaEntity { revisionAttachment.ownerId = revision.revisionId; revisionAttachment.setContent(noteAttachment.getContent(), { forceSave: true }); - if (this.type === 'text') { + if (this.type === 'text' && typeof noteContent === "string") { // content is rewritten to point to the revision attachments noteContent = noteContent.replaceAll(`attachments/${noteAttachment.attachmentId}`, `attachments/${revisionAttachment.attachmentId}`); @@ -1654,7 +1657,10 @@ class BNote extends AbstractBeccaEntity { position }); - content = content || ""; + if (!content) { + throw new Error("Attempted to save an attachment with no content."); + } + attachment.setContent(content, {forceSave: true}); return attachment; diff --git a/src/becca/entities/boption.ts b/src/becca/entities/boption.ts index afbf5320e..48abee024 100644 --- a/src/becca/entities/boption.ts +++ b/src/becca/entities/boption.ts @@ -16,10 +16,12 @@ class BOption extends AbstractBeccaEntity { value!: string; isSynced!: boolean; - constructor(row: OptionRow) { + constructor(row?: OptionRow) { super(); - this.updateFromRow(row); + if (row) { + this.updateFromRow(row); + } this.becca.options[this.name] = this; } diff --git a/src/becca/entities/brecent_note.ts b/src/becca/entities/brecent_note.ts index 0771a5e00..c19a83603 100644 --- a/src/becca/entities/brecent_note.ts +++ b/src/becca/entities/brecent_note.ts @@ -11,14 +11,19 @@ import AbstractBeccaEntity = require('./abstract_becca_entity'); class BRecentNote extends AbstractBeccaEntity { static get entityName() { return "recent_notes"; } static get primaryKeyName() { return "noteId"; } + static get hashedProperties() { return ["noteId", "notePath"]; } - noteId: string; - notePath: string; - utcDateCreated: string; + noteId!: string; + notePath!: string; + utcDateCreated!: string; constructor(row: RecentNoteRow) { super(); + this.updateFromRow(row); + } + + updateFromRow(row: RecentNoteRow): void { this.noteId = row.noteId; this.notePath = row.notePath; this.utcDateCreated = row.utcDateCreated || 
dateUtils.utcNowDateTime(); diff --git a/src/becca/entities/brevision.ts b/src/becca/entities/brevision.ts index ba7cc00ba..101506858 100644 --- a/src/becca/entities/brevision.ts +++ b/src/becca/entities/brevision.ts @@ -29,22 +29,30 @@ class BRevision extends AbstractBeccaEntity { "utcDateLastEdited", "utcDateCreated", "utcDateModified", "blobId"]; } revisionId?: string; - noteId: string; - type: string; - mime: string; - isProtected: boolean; - title: string; + noteId!: string; + type!: string; + mime!: string; + isProtected!: boolean; + title!: string; blobId?: string; dateLastEdited?: string; - dateCreated: string; + dateCreated!: string; utcDateLastEdited?: string; - utcDateCreated: string; + utcDateCreated!: string; contentLength?: number; content?: string; constructor(row: RevisionRow, titleDecrypted = false) { super(); + this.updateFromRow(row); + if (this.isProtected && !titleDecrypted) { + const decryptedTitle = protectedSessionService.isProtectedSessionAvailable() ? protectedSessionService.decryptString(this.title) : null; + this.title = decryptedTitle || "[protected]"; + } + } + + updateFromRow(row: RevisionRow) { this.revisionId = row.revisionId; this.noteId = row.noteId; this.type = row.type; @@ -58,11 +66,6 @@ class BRevision extends AbstractBeccaEntity { this.utcDateCreated = row.utcDateCreated; this.utcDateModified = row.utcDateModified; this.contentLength = row.contentLength; - - if (this.isProtected && !titleDecrypted) { - const decryptedTitle = protectedSessionService.isProtectedSessionAvailable() ? protectedSessionService.decryptString(this.title) : null; - this.title = decryptedTitle || "[protected]"; - } } getNote() { diff --git a/src/becca/entities/rows.ts b/src/becca/entities/rows.ts index 567f14f9f..4f2113f2e 100644 --- a/src/becca/entities/rows.ts +++ b/src/becca/entities/rows.ts @@ -5,7 +5,7 @@ export interface AttachmentRow { ownerId?: string; role: string; mime: string; - title?: string; + title: string; position?: number; blobId?: string; isProtected?: boolean; @@ -13,7 +13,7 @@ export interface AttachmentRow { utcDateModified?: string; utcDateScheduledForErasureSince?: string; contentLength?: number; - content?: string; + content?: Buffer | string; } export interface RevisionRow { @@ -46,12 +46,12 @@ export interface OptionRow { } export interface EtapiTokenRow { - etapiTokenId: string; + etapiTokenId?: string; name: string; tokenHash: string; utcDateCreated?: string; utcDateModified?: string; - isDeleted: boolean; + isDeleted?: boolean; } export interface BlobRow { @@ -69,9 +69,9 @@ export interface AttributeRow { noteId: string; type: AttributeType; name: string; - position: number; - value: string; - isInheritable: boolean; + position?: number; + value?: string; + isInheritable?: boolean; utcDateModified?: string; } @@ -79,9 +79,10 @@ export interface BranchRow { branchId?: string; noteId: string; parentNoteId: string; - prefix: string | null; - notePosition: number; - isExpanded: boolean; + prefix?: string | null; + notePosition: number | null; + isExpanded?: boolean; + isDeleted?: boolean; utcDateModified?: string; } @@ -94,13 +95,15 @@ export type NoteType = ("file" | "image" | "search" | "noteMap" | "launcher" | " export interface NoteRow { noteId: string; + deleteId: string; title: string; type: NoteType; mime: string; isProtected: boolean; + isDeleted: boolean; blobId: string; dateCreated: string; dateModified: string; utcDateCreated: string; utcDateModified: string; -} \ No newline at end of file +} diff --git a/src/becca/entity_constructor.js 
b/src/becca/entity_constructor.js deleted file mode 100644 index c140b8a75..000000000 --- a/src/becca/entity_constructor.js +++ /dev/null @@ -1,33 +0,0 @@ -const BAttachment = require('./entities/battachment'); -const BAttribute = require('./entities/battribute'); -const BBlob = require('./entities/bblob'); -const BBranch = require('./entities/bbranch'); -const BEtapiToken = require('./entities/betapi_token'); -const BNote = require('./entities/bnote'); -const BOption = require('./entities/boption'); -const BRecentNote = require('./entities/brecent_note'); -const BRevision = require('./entities/brevision'); - -const ENTITY_NAME_TO_ENTITY = { - "attachments": BAttachment, - "attributes": BAttribute, - "blobs": BBlob, - "branches": BBranch, - "etapi_tokens": BEtapiToken, - "notes": BNote, - "options": BOption, - "recent_notes": BRecentNote, - "revisions": BRevision -}; - -function getEntityFromEntityName(entityName) { - if (!(entityName in ENTITY_NAME_TO_ENTITY)) { - throw new Error(`Entity for table '${entityName}' not found!`); - } - - return ENTITY_NAME_TO_ENTITY[entityName]; -} - -module.exports = { - getEntityFromEntityName -}; diff --git a/src/becca/entity_constructor.ts b/src/becca/entity_constructor.ts new file mode 100644 index 000000000..01c51363a --- /dev/null +++ b/src/becca/entity_constructor.ts @@ -0,0 +1,37 @@ +import { ConstructorData } from './becca-interface'; +import AbstractBeccaEntity = require('./entities/abstract_becca_entity'); +import BAttachment = require('./entities/battachment'); +import BAttribute = require('./entities/battribute'); +import BBlob = require('./entities/bblob'); +import BBranch = require('./entities/bbranch'); +import BEtapiToken = require('./entities/betapi_token'); +import BNote = require('./entities/bnote'); +import BOption = require('./entities/boption'); +import BRecentNote = require('./entities/brecent_note'); +import BRevision = require('./entities/brevision'); + +type EntityClass = new (row?: any) => AbstractBeccaEntity; + +const ENTITY_NAME_TO_ENTITY: Record & EntityClass> = { + "attachments": BAttachment, + "attributes": BAttribute, + "blobs": BBlob, + "branches": BBranch, + "etapi_tokens": BEtapiToken, + "notes": BNote, + "options": BOption, + "recent_notes": BRecentNote, + "revisions": BRevision +}; + +function getEntityFromEntityName(entityName: keyof typeof ENTITY_NAME_TO_ENTITY) { + if (!(entityName in ENTITY_NAME_TO_ENTITY)) { + throw new Error(`Entity for table '${entityName}' not found!`); + } + + return ENTITY_NAME_TO_ENTITY[entityName]; +} + +export = { + getEntityFromEntityName +}; diff --git a/src/becca/similarity.js b/src/becca/similarity.ts similarity index 91% rename from src/becca/similarity.js rename to src/becca/similarity.ts index 07f643a7d..e6721d0df 100644 --- a/src/becca/similarity.js +++ b/src/becca/similarity.ts @@ -1,8 +1,9 @@ -const becca = require('./becca'); -const log = require('../services/log'); -const beccaService = require('./becca_service'); -const dateUtils = require('../services/date_utils'); -const {JSDOM} = require("jsdom"); +import becca = require('./becca'); +import log = require('../services/log'); +import beccaService = require('./becca_service'); +import dateUtils = require('../services/date_utils'); +import { JSDOM } from "jsdom"; +import BNote = require('./entities/bnote'); const DEBUG = false; @@ -32,21 +33,25 @@ const IGNORED_ATTR_NAMES = [ "pageurl", ]; -function filterUrlValue(value) { +interface DateLimits { + minDate: string; + minExcludedDate: string; + maxExcludedDate: string; + 
maxDate: string; +} + +function filterUrlValue(value: string) { return value .replace(/https?:\/\//ig, "") .replace(/www.js\./ig, "") .replace(/(\.net|\.com|\.org|\.info|\.edu)/ig, ""); } -/** - * @param {BNote} note - */ -function buildRewardMap(note) { +function buildRewardMap(note: BNote) { // Need to use Map instead of object: https://github.com/zadam/trilium/issues/1895 const map = new Map(); - function addToRewardMap(text, rewardFactor) { + function addToRewardMap(text: string | undefined | null, rewardFactor: number) { if (!text) { return; } @@ -126,7 +131,7 @@ function buildRewardMap(note) { const content = note.getContent(); const dom = new JSDOM(content); - function addHeadingsToRewardMap(elName, rewardFactor) { + const addHeadingsToRewardMap = (elName: string, rewardFactor: number) => { for (const el of dom.window.document.querySelectorAll(elName)) { addToRewardMap(el.textContent, rewardFactor); } @@ -146,9 +151,9 @@ function buildRewardMap(note) { return map; } -const mimeCache = {}; +const mimeCache: Record = {}; -function trimMime(mime) { +function trimMime(mime: string) { if (!mime || mime === 'text/html') { return; } @@ -173,7 +178,7 @@ function trimMime(mime) { return mimeCache[mime]; } -function buildDateLimits(baseNote) { +function buildDateLimits(baseNote: BNote): DateLimits { const dateCreatedTs = dateUtils.parseDateTime(baseNote.utcDateCreated).getTime(); return { @@ -193,7 +198,7 @@ const WORD_BLACKLIST = [ "than", "then", "and", "either", "or", "neither", "nor", "both", "also" ]; -function splitToWords(text) { +function splitToWords(text: string) { let words = wordCache.get(text); if (!words) { @@ -221,13 +226,13 @@ function splitToWords(text) { * includeNoteLink and imageLink relation mean that notes are clearly related, but so clearly * that it doesn't actually need to be shown to the user. 
*/ -function hasConnectingRelation(sourceNote, targetNote) { +function hasConnectingRelation(sourceNote: BNote, targetNote: BNote) { return sourceNote.getAttributes().find(attr => attr.type === 'relation' && ['includenotelink', 'imagelink'].includes(attr.name) && attr.value === targetNote.noteId); } -async function findSimilarNotes(noteId) { +async function findSimilarNotes(noteId: string) { const results = []; let i = 0; @@ -237,23 +242,23 @@ async function findSimilarNotes(noteId) { return []; } - let dateLimits; + let dateLimits: DateLimits; try { dateLimits = buildDateLimits(baseNote); } - catch (e) { + catch (e: any) { throw new Error(`Date limits failed with ${e.message}, entity: ${JSON.stringify(baseNote.getPojo())}`); } const rewardMap = buildRewardMap(baseNote); - let ancestorRewardCache = {}; + let ancestorRewardCache: Record = {}; const ancestorNoteIds = new Set(baseNote.getAncestors().map(note => note.noteId)); ancestorNoteIds.add(baseNote.noteId); let displayRewards = false; - function gatherRewards(text, factor = 1) { + function gatherRewards(text?: string | null, factor: number = 1) { if (!text) { return 0; } @@ -279,7 +284,7 @@ async function findSimilarNotes(noteId) { return counter; } - function gatherAncestorRewards(note) { + function gatherAncestorRewards(note?: BNote) { if (!note || ancestorNoteIds.has(note.noteId)) { return 0; } @@ -311,7 +316,7 @@ async function findSimilarNotes(noteId) { return ancestorRewardCache[note.noteId]; } - function computeScore(candidateNote) { + function computeScore(candidateNote: BNote) { let score = gatherRewards(trimMime(candidateNote.mime)) + gatherAncestorRewards(candidateNote); @@ -451,11 +456,11 @@ async function findSimilarNotes(noteId) { * see https://snyk.io/blog/nodejs-how-even-quick-async-functions-can-block-the-event-loop-starve-io/ */ function setImmediatePromise() { - return new Promise((resolve) => { + return new Promise((resolve) => { setTimeout(() => resolve(), 0); }); } -module.exports = { +export = { findSimilarNotes }; diff --git a/src/errors/validation_error.ts b/src/errors/validation_error.ts index 0cabecc8e..8b872bcbe 100644 --- a/src/errors/validation_error.ts +++ b/src/errors/validation_error.ts @@ -6,4 +6,4 @@ class ValidationError { } } -module.exports = ValidationError; \ No newline at end of file +export = ValidationError; \ No newline at end of file diff --git a/src/etapi/app_info.js b/src/etapi/app_info.js index a58850e77..20c1381f0 100644 --- a/src/etapi/app_info.js +++ b/src/etapi/app_info.js @@ -1,4 +1,4 @@ -const appInfo = require('../services/app_info.js'); +const appInfo = require('../services/app_info'); const eu = require('./etapi_utils'); function register(router) { diff --git a/src/etapi/auth.js b/src/etapi/auth.js index 835d016a1..7a3b258d8 100644 --- a/src/etapi/auth.js +++ b/src/etapi/auth.js @@ -1,7 +1,7 @@ const becca = require('../becca/becca'); const eu = require('./etapi_utils'); const passwordEncryptionService = require('../services/encryption/password_encryption'); -const etapiTokenService = require('../services/etapi_tokens.js'); +const etapiTokenService = require('../services/etapi_tokens'); function register(router, loginMiddleware) { eu.NOT_AUTHENTICATED_ROUTE(router, 'post', '/etapi/auth/login', loginMiddleware, (req, res, next) => { diff --git a/src/etapi/backup.js b/src/etapi/backup.js index 2897c36b2..7900570c4 100644 --- a/src/etapi/backup.js +++ b/src/etapi/backup.js @@ -1,5 +1,5 @@ const eu = require('./etapi_utils'); -const backupService = require('../services/backup.js'); 
+const backupService = require('../services/backup'); function register(router) { eu.route(router, 'put', '/etapi/backup/:backupName', async (req, res, next) => { diff --git a/src/etapi/etapi_utils.js b/src/etapi/etapi_utils.js index 0ec013b20..b5928f4d7 100644 --- a/src/etapi/etapi_utils.js +++ b/src/etapi/etapi_utils.js @@ -2,7 +2,7 @@ const cls = require('../services/cls'); const sql = require('../services/sql'); const log = require('../services/log'); const becca = require('../becca/becca'); -const etapiTokenService = require('../services/etapi_tokens.js'); +const etapiTokenService = require('../services/etapi_tokens'); const config = require('../services/config'); const GENERIC_CODE = "GENERIC"; diff --git a/src/etapi/notes.js b/src/etapi/notes.js index 76318a89d..0d96468d4 100644 --- a/src/etapi/notes.js +++ b/src/etapi/notes.js @@ -2,7 +2,7 @@ const becca = require('../becca/becca'); const utils = require('../services/utils'); const eu = require('./etapi_utils'); const mappers = require('./mappers.js'); -const noteService = require('../services/notes.js'); +const noteService = require('../services/notes'); const TaskContext = require('../services/task_context'); const v = require('./validators.js'); const searchService = require('../services/search/services/search.js'); diff --git a/src/etapi/validators.js b/src/etapi/validators.js index 59ddc1675..bcca288ca 100644 --- a/src/etapi/validators.js +++ b/src/etapi/validators.js @@ -1,4 +1,4 @@ -const noteTypeService = require('../services/note_types.js'); +const noteTypeService = require('../services/note_types'); const dateUtils = require('../services/date_utils'); function mandatory(obj) { diff --git a/src/routes/api/app_info.js b/src/routes/api/app_info.js index cb1996656..aec592909 100644 --- a/src/routes/api/app_info.js +++ b/src/routes/api/app_info.js @@ -1,6 +1,6 @@ "use strict"; -const appInfo = require('../../services/app_info.js'); +const appInfo = require('../../services/app_info'); function getAppInfo() { return appInfo; diff --git a/src/routes/api/autocomplete.js b/src/routes/api/autocomplete.js index a3113ba62..4d48709a0 100644 --- a/src/routes/api/autocomplete.js +++ b/src/routes/api/autocomplete.js @@ -1,6 +1,6 @@ "use strict"; -const beccaService = require('../../becca/becca_service.js'); +const beccaService = require('../../becca/becca_service'); const searchService = require('../../services/search/services/search.js'); const log = require('../../services/log'); const utils = require('../../services/utils'); diff --git a/src/routes/api/branches.js b/src/routes/api/branches.js index cbc951f1e..ae06efdda 100644 --- a/src/routes/api/branches.js +++ b/src/routes/api/branches.js @@ -4,7 +4,7 @@ const sql = require('../../services/sql'); const utils = require('../../services/utils'); const entityChangesService = require('../../services/entity_changes'); const treeService = require('../../services/tree.js'); -const eraseService = require('../../services/erase.js'); +const eraseService = require('../../services/erase'); const becca = require('../../becca/becca'); const TaskContext = require('../../services/task_context'); const branchService = require('../../services/branches.js'); diff --git a/src/routes/api/clipper.js b/src/routes/api/clipper.js index ea4fb64c3..b5cda8c31 100644 --- a/src/routes/api/clipper.js +++ b/src/routes/api/clipper.js @@ -2,17 +2,17 @@ const attributeService = require('../../services/attributes.js'); const cloneService = require('../../services/cloning.js'); -const noteService = 
require('../../services/notes.js'); +const noteService = require('../../services/notes'); const dateNoteService = require('../../services/date_notes.js'); const dateUtils = require('../../services/date_utils'); const imageService = require('../../services/image.js'); -const appInfo = require('../../services/app_info.js'); +const appInfo = require('../../services/app_info'); const ws = require('../../services/ws'); const log = require('../../services/log'); const utils = require('../../services/utils'); const path = require('path'); -const htmlSanitizer = require('../../services/html_sanitizer.js'); -const {formatAttrForSearch} = require('../../services/attribute_formatter.js'); +const htmlSanitizer = require('../../services/html_sanitizer'); +const {formatAttrForSearch} = require('../../services/attribute_formatter'); const jsdom = require("jsdom"); const { JSDOM } = jsdom; diff --git a/src/routes/api/database.js b/src/routes/api/database.js index 6c8063fe1..4ea27e8ad 100644 --- a/src/routes/api/database.js +++ b/src/routes/api/database.js @@ -2,8 +2,8 @@ const sql = require('../../services/sql'); const log = require('../../services/log'); -const backupService = require('../../services/backup.js'); -const anonymizationService = require('../../services/anonymization.js'); +const backupService = require('../../services/backup'); +const anonymizationService = require('../../services/anonymization'); const consistencyChecksService = require('../../services/consistency_checks.js'); function getExistingBackups() { diff --git a/src/routes/api/etapi_tokens.js b/src/routes/api/etapi_tokens.js index 2c334643a..b0d29db3e 100644 --- a/src/routes/api/etapi_tokens.js +++ b/src/routes/api/etapi_tokens.js @@ -1,4 +1,4 @@ -const etapiTokenService = require('../../services/etapi_tokens.js'); +const etapiTokenService = require('../../services/etapi_tokens'); function getTokens() { const tokens = etapiTokenService.getTokens(); diff --git a/src/routes/api/files.js b/src/routes/api/files.js index 5e0c391d6..f368383d5 100644 --- a/src/routes/api/files.js +++ b/src/routes/api/files.js @@ -3,7 +3,7 @@ const protectedSessionService = require('../../services/protected_session'); const utils = require('../../services/utils'); const log = require('../../services/log'); -const noteService = require('../../services/notes.js'); +const noteService = require('../../services/notes'); const tmp = require('tmp'); const fs = require('fs'); const { Readable } = require('stream'); diff --git a/src/routes/api/import.js b/src/routes/api/import.js index 115546cae..19b54e8e9 100644 --- a/src/routes/api/import.js +++ b/src/routes/api/import.js @@ -7,7 +7,7 @@ const singleImportService = require('../../services/import/single.js'); const cls = require('../../services/cls'); const path = require('path'); const becca = require('../../becca/becca'); -const beccaLoader = require('../../becca/becca_loader.js'); +const beccaLoader = require('../../becca/becca_loader'); const log = require('../../services/log'); const TaskContext = require('../../services/task_context'); const ValidationError = require('../../errors/validation_error'); diff --git a/src/routes/api/keys.js b/src/routes/api/keys.js index a2f2a0b37..bc1b97d4a 100644 --- a/src/routes/api/keys.js +++ b/src/routes/api/keys.js @@ -1,6 +1,6 @@ "use strict"; -const keyboardActions = require('../../services/keyboard_actions.js'); +const keyboardActions = require('../../services/keyboard_actions'); const becca = require('../../becca/becca'); function getKeyboardActions() { diff --git 
a/src/routes/api/login.js b/src/routes/api/login.js index 7a5ff5a04..9cb0ec8bc 100644 --- a/src/routes/api/login.js +++ b/src/routes/api/login.js @@ -6,12 +6,12 @@ const dateUtils = require('../../services/date_utils'); const instanceId = require('../../services/instance_id'); const passwordEncryptionService = require('../../services/encryption/password_encryption'); const protectedSessionService = require('../../services/protected_session'); -const appInfo = require('../../services/app_info.js'); +const appInfo = require('../../services/app_info'); const eventService = require('../../services/events'); -const sqlInit = require('../../services/sql_init.js'); +const sqlInit = require('../../services/sql_init'); const sql = require('../../services/sql'); const ws = require('../../services/ws'); -const etapiTokenService = require('../../services/etapi_tokens.js'); +const etapiTokenService = require('../../services/etapi_tokens'); function loginSync(req) { if (!sqlInit.schemaExists()) { diff --git a/src/routes/api/notes.js b/src/routes/api/notes.js index 467a089f7..4f3f89176 100644 --- a/src/routes/api/notes.js +++ b/src/routes/api/notes.js @@ -1,7 +1,7 @@ "use strict"; -const noteService = require('../../services/notes.js'); -const eraseService = require('../../services/erase.js'); +const noteService = require('../../services/notes'); +const eraseService = require('../../services/erase'); const treeService = require('../../services/tree.js'); const sql = require('../../services/sql'); const utils = require('../../services/utils'); diff --git a/src/routes/api/recent_changes.js b/src/routes/api/recent_changes.js index 6fea4a105..44e964ecf 100644 --- a/src/routes/api/recent_changes.js +++ b/src/routes/api/recent_changes.js @@ -2,7 +2,7 @@ const sql = require('../../services/sql'); const protectedSessionService = require('../../services/protected_session'); -const noteService = require('../../services/notes.js'); +const noteService = require('../../services/notes'); const becca = require('../../becca/becca'); function getRecentChanges(req) { diff --git a/src/routes/api/revisions.js b/src/routes/api/revisions.js index e4a843016..e317fec95 100644 --- a/src/routes/api/revisions.js +++ b/src/routes/api/revisions.js @@ -1,14 +1,14 @@ "use strict"; -const beccaService = require('../../becca/becca_service.js'); -const revisionService = require('../../services/revisions.js'); +const beccaService = require('../../becca/becca_service'); +const revisionService = require('../../services/revisions'); const utils = require('../../services/utils'); const sql = require('../../services/sql'); const cls = require('../../services/cls'); const path = require('path'); const becca = require('../../becca/becca'); const blobService = require('../../services/blob'); -const eraseService = require("../../services/erase.js"); +const eraseService = require("../../services/erase"); function getRevisionBlob(req) { const preview = req.query.preview === 'true'; diff --git a/src/routes/api/search.js b/src/routes/api/search.js index b98e08517..a582dafcd 100644 --- a/src/routes/api/search.js +++ b/src/routes/api/search.js @@ -5,7 +5,7 @@ const SearchContext = require('../../services/search/search_context.js'); const searchService = require('../../services/search/services/search.js'); const bulkActionService = require('../../services/bulk_actions.js'); const cls = require('../../services/cls'); -const {formatAttrForSearch} = require('../../services/attribute_formatter.js'); +const {formatAttrForSearch} = 
require('../../services/attribute_formatter'); const ValidationError = require('../../errors/validation_error'); function searchFromNote(req) { diff --git a/src/routes/api/sender.js b/src/routes/api/sender.js index 000d1eecb..3c405ecc1 100644 --- a/src/routes/api/sender.js +++ b/src/routes/api/sender.js @@ -2,7 +2,7 @@ const imageType = require('image-type'); const imageService = require('../../services/image.js'); -const noteService = require('../../services/notes.js'); +const noteService = require('../../services/notes'); const {sanitizeAttributeName} = require('../../services/sanitize_attribute_name'); const specialNotesService = require('../../services/special_notes.js'); diff --git a/src/routes/api/setup.js b/src/routes/api/setup.js index bd328509f..000c3c21c 100644 --- a/src/routes/api/setup.js +++ b/src/routes/api/setup.js @@ -1,9 +1,9 @@ "use strict"; -const sqlInit = require('../../services/sql_init.js'); +const sqlInit = require('../../services/sql_init'); const setupService = require('../../services/setup.js'); const log = require('../../services/log'); -const appInfo = require('../../services/app_info.js'); +const appInfo = require('../../services/app_info'); function getStatus() { return { diff --git a/src/routes/api/similar_notes.js b/src/routes/api/similar_notes.js index 3dee1d0c1..555efd1b5 100644 --- a/src/routes/api/similar_notes.js +++ b/src/routes/api/similar_notes.js @@ -1,6 +1,6 @@ "use strict"; -const similarityService = require('../../becca/similarity.js'); +const similarityService = require('../../becca/similarity'); const becca = require('../../becca/becca'); async function getSimilarNotes(req) { diff --git a/src/routes/api/sync.js b/src/routes/api/sync.js index ab32eb671..68ac0a6e8 100644 --- a/src/routes/api/sync.js +++ b/src/routes/api/sync.js @@ -4,11 +4,11 @@ const syncService = require('../../services/sync.js'); const syncUpdateService = require('../../services/sync_update.js'); const entityChangesService = require('../../services/entity_changes'); const sql = require('../../services/sql'); -const sqlInit = require('../../services/sql_init.js'); +const sqlInit = require('../../services/sql_init'); const optionService = require('../../services/options'); const contentHashService = require('../../services/content_hash.js'); const log = require('../../services/log'); -const syncOptions = require('../../services/sync_options.js'); +const syncOptions = require('../../services/sync_options'); const utils = require('../../services/utils'); const ws = require('../../services/ws'); diff --git a/src/routes/assets.js b/src/routes/assets.js index 3f5b81013..b6b46332c 100644 --- a/src/routes/assets.js +++ b/src/routes/assets.js @@ -1,4 +1,4 @@ -const assetPath = require('../services/asset_path.js'); +const assetPath = require('../services/asset_path'); const path = require("path"); const express = require("express"); const env = require('../services/env'); diff --git a/src/routes/index.js b/src/routes/index.js index 277d58381..c9c0a33de 100644 --- a/src/routes/index.js +++ b/src/routes/index.js @@ -9,8 +9,8 @@ const env = require('../services/env'); const utils = require('../services/utils'); const protectedSessionService = require('../services/protected_session'); const packageJson = require('../../package.json'); -const assetPath = require('../services/asset_path.js'); -const appPath = require('../services/app_path.js'); +const assetPath = require('../services/asset_path'); +const appPath = require('../services/app_path'); function index(req, res) { const options = 
optionService.getOptionMap(); diff --git a/src/routes/login.js b/src/routes/login.js index 5aea1a6d5..649e9b854 100644 --- a/src/routes/login.js +++ b/src/routes/login.js @@ -5,8 +5,8 @@ const optionService = require('../services/options'); const myScryptService = require('../services/encryption/my_scrypt'); const log = require('../services/log'); const passwordService = require('../services/encryption/password'); -const assetPath = require('../services/asset_path.js'); -const appPath = require('../services/app_path.js'); +const assetPath = require('../services/asset_path'); +const appPath = require('../services/app_path'); const ValidationError = require('../errors/validation_error'); function loginPage(req, res) { diff --git a/src/routes/routes.js b/src/routes/routes.js index aa67f7dba..2f2bdcb7d 100644 --- a/src/routes/routes.js +++ b/src/routes/routes.js @@ -5,7 +5,7 @@ const multer = require('multer'); const log = require('../services/log'); const express = require('express'); const router = express.Router(); -const auth = require('../services/auth.js'); +const auth = require('../services/auth'); const cls = require('../services/cls'); const sql = require('../services/sql'); const entityChangesService = require('../services/entity_changes'); @@ -28,14 +28,14 @@ const branchesApiRoute = require('./api/branches.js'); const attachmentsApiRoute = require('./api/attachments.js'); const autocompleteApiRoute = require('./api/autocomplete.js'); const cloningApiRoute = require('./api/cloning.js'); -const revisionsApiRoute = require('./api/revisions.js'); +const revisionsApiRoute = require('./api/revisions'); const recentChangesApiRoute = require('./api/recent_changes.js'); const optionsApiRoute = require('./api/options.js'); const passwordApiRoute = require('./api/password'); const syncApiRoute = require('./api/sync.js'); const loginApiRoute = require('./api/login.js'); const recentNotesRoute = require('./api/recent_notes.js'); -const appInfoRoute = require('./api/app_info.js'); +const appInfoRoute = require('./api/app_info'); const exportRoute = require('./api/export.js'); const importRoute = require('./api/import.js'); const setupApiRoute = require('./api/setup.js'); @@ -56,20 +56,20 @@ const keysRoute = require('./api/keys.js'); const backendLogRoute = require('./api/backend_log.js'); const statsRoute = require('./api/stats.js'); const fontsRoute = require('./api/fonts.js'); -const etapiTokensApiRoutes = require('./api/etapi_tokens.js'); +const etapiTokensApiRoutes = require('./api/etapi_tokens'); const relationMapApiRoute = require('./api/relation-map'); const otherRoute = require('./api/other.js'); const shareRoutes = require('../share/routes.js'); const etapiAuthRoutes = require('../etapi/auth.js'); -const etapiAppInfoRoutes = require('../etapi/app_info.js'); +const etapiAppInfoRoutes = require('../etapi/app_info'); const etapiAttachmentRoutes = require('../etapi/attachments.js'); const etapiAttributeRoutes = require('../etapi/attributes.js'); const etapiBranchRoutes = require('../etapi/branches.js'); const etapiNoteRoutes = require('../etapi/notes.js'); const etapiSpecialNoteRoutes = require('../etapi/special_notes.js'); const etapiSpecRoute = require('../etapi/spec.js'); -const etapiBackupRoute = require('../etapi/backup.js'); +const etapiBackupRoute = require('../etapi/backup'); const csrfMiddleware = csurf({ cookie: true, diff --git a/src/routes/setup.js b/src/routes/setup.js index fe74c68a5..d3af71bef 100644 --- a/src/routes/setup.js +++ b/src/routes/setup.js @@ -1,10 +1,10 @@ "use 
strict"; -const sqlInit = require('../services/sql_init.js'); +const sqlInit = require('../services/sql_init'); const setupService = require('../services/setup.js'); const utils = require('../services/utils'); -const assetPath = require('../services/asset_path.js'); -const appPath = require('../services/app_path.js'); +const assetPath = require('../services/asset_path'); +const appPath = require('../services/app_path'); function setupPage(req, res) { if (sqlInit.isDbInitialized()) { diff --git a/src/services/anonymization.js b/src/services/anonymization.ts similarity index 90% rename from src/services/anonymization.js rename to src/services/anonymization.ts index 6a223ec4e..09599dcd4 100644 --- a/src/services/anonymization.js +++ b/src/services/anonymization.ts @@ -1,10 +1,10 @@ -const BUILTIN_ATTRIBUTES = require('./builtin_attributes.js'); -const fs = require("fs-extra"); -const dataDir = require('./data_dir'); -const dateUtils = require('./date_utils'); -const Database = require("better-sqlite3"); -const sql = require('./sql'); -const path = require("path"); +import BUILTIN_ATTRIBUTES = require('./builtin_attributes'); +import fs = require("fs-extra"); +import dataDir = require('./data_dir'); +import dateUtils = require('./date_utils'); +import Database = require("better-sqlite3"); +import sql = require('./sql'); +import path = require("path"); function getFullAnonymizationScript() { // we want to delete all non-builtin attributes because they can contain sensitive names and values @@ -48,7 +48,7 @@ function getLightAnonymizationScript() { AND value != '';`; } -async function createAnonymizedCopy(type) { +async function createAnonymizedCopy(type: "full" | "light") { if (!['full', 'light'].includes(type)) { throw new Error(`Unrecognized anonymization type '${type}'`); } diff --git a/src/services/app_icon.js b/src/services/app_icon.ts similarity index 75% rename from src/services/app_icon.js rename to src/services/app_icon.ts index bc845ab8c..91f85d3e9 100644 --- a/src/services/app_icon.js +++ b/src/services/app_icon.ts @@ -1,12 +1,12 @@ "use strict"; -const path = require('path'); -const {ELECTRON_APP_ROOT_DIR} = require('./resource_dir'); -const log = require('./log'); -const os = require('os'); -const fs = require('fs'); -const config = require('./config'); -const utils = require('./utils'); +import path = require('path'); +import resourceDir = require('./resource_dir'); +import log = require('./log'); +import os = require('os'); +import fs = require('fs'); +import config = require('./config'); +import utils = require('./utils'); const template = `[Desktop Entry] Type=Application @@ -28,7 +28,7 @@ function installLocalAppIcon() { return; } - if (!fs.existsSync(path.resolve(ELECTRON_APP_ROOT_DIR, "trilium-portable.sh"))) { + if (!fs.existsSync(path.resolve(resourceDir.ELECTRON_APP_ROOT_DIR, "trilium-portable.sh"))) { // simple heuristic to detect ".tar.xz" linux build (i.e., not flatpak, not debian) // only in such case it's necessary to create an icon return; @@ -56,16 +56,16 @@ function installLocalAppIcon() { function getDesktopFileContent() { return template - .replace("#APP_ROOT_DIR#", escapePath(ELECTRON_APP_ROOT_DIR)) + .replace("#APP_ROOT_DIR#", escapePath(resourceDir.ELECTRON_APP_ROOT_DIR)) .replace("#EXE_PATH#", escapePath(getExePath())); } -function escapePath(path) { +function escapePath(path: string) { return path.replace(/ /g, "\\ "); } function getExePath() { - return path.resolve(ELECTRON_APP_ROOT_DIR, 'trilium'); + return path.resolve(resourceDir.ELECTRON_APP_ROOT_DIR, 
'trilium'); } module.exports = { diff --git a/src/services/app_info.js b/src/services/app_info.ts similarity index 69% rename from src/services/app_info.js rename to src/services/app_info.ts index 1d419a4bf..92bf2dee1 100644 --- a/src/services/app_info.js +++ b/src/services/app_info.ts @@ -1,21 +1,21 @@ "use strict"; -const build = require('./build.js'); -const packageJson = require('../../package.json'); -const {TRILIUM_DATA_DIR} = require('./data_dir'); +import build = require('./build'); +import packageJson = require('../../package.json'); +import dataDir = require('./data_dir'); const APP_DB_VERSION = 228; const SYNC_VERSION = 32; const CLIPPER_PROTOCOL_VERSION = "1.0"; -module.exports = { +export = { appVersion: packageJson.version, dbVersion: APP_DB_VERSION, nodeVersion: process.version, syncVersion: SYNC_VERSION, buildDate: build.buildDate, buildRevision: build.buildRevision, - dataDirectory: TRILIUM_DATA_DIR, + dataDirectory: dataDir.TRILIUM_DATA_DIR, clipperProtocolVersion: CLIPPER_PROTOCOL_VERSION, utcDateTime: new Date().toISOString() // for timezone inference }; diff --git a/src/services/app_path.js b/src/services/app_path.js deleted file mode 100644 index 9f28c9fa2..000000000 --- a/src/services/app_path.js +++ /dev/null @@ -1,6 +0,0 @@ -const assetPath = require('./asset_path.js'); -const env = require('./env'); - -module.exports = env.isDev() - ? assetPath + "/app" - : assetPath + "/app-dist"; diff --git a/src/services/app_path.ts b/src/services/app_path.ts new file mode 100644 index 000000000..3bfa7de40 --- /dev/null +++ b/src/services/app_path.ts @@ -0,0 +1,6 @@ +import assetPath = require('./asset_path'); +import env = require('./env'); + +export = env.isDev() + ? assetPath + "/app" + : assetPath + "/app-dist"; diff --git a/src/services/asset_path.js b/src/services/asset_path.js deleted file mode 100644 index a32ebc553..000000000 --- a/src/services/asset_path.js +++ /dev/null @@ -1,3 +0,0 @@ -const packageJson = require('../../package.json'); - -module.exports = `assets/v${packageJson.version}`; diff --git a/src/services/asset_path.ts b/src/services/asset_path.ts new file mode 100644 index 000000000..53ffebba9 --- /dev/null +++ b/src/services/asset_path.ts @@ -0,0 +1,3 @@ +import packageJson = require('../../package.json'); + +export = `assets/v${packageJson.version}`; diff --git a/src/services/attribute_formatter.js b/src/services/attribute_formatter.ts similarity index 83% rename from src/services/attribute_formatter.js rename to src/services/attribute_formatter.ts index c8a9c1de6..846450f36 100644 --- a/src/services/attribute_formatter.js +++ b/src/services/attribute_formatter.ts @@ -1,6 +1,8 @@ "use strict"; -function formatAttrForSearch(attr, searchWithValue) { +import BAttribute = require("../becca/entities/battribute"); + +function formatAttrForSearch(attr: BAttribute, searchWithValue: string) { let searchStr = ''; if (attr.type === 'label') { @@ -27,7 +29,7 @@ function formatAttrForSearch(attr, searchWithValue) { return searchStr; } -function formatValue(val) { +function formatValue(val: string) { if (!/[^\w]/.test(val)) { return val; } @@ -45,6 +47,6 @@ function formatValue(val) { } } -module.exports = { +export = { formatAttrForSearch }; diff --git a/src/services/attributes.js b/src/services/attributes.js index 685225ae7..fc527429c 100644 --- a/src/services/attributes.js +++ b/src/services/attributes.js @@ -4,8 +4,8 @@ const searchService = require('./search/services/search.js'); const sql = require('./sql'); const becca = require('../becca/becca'); const 
BAttribute = require('../becca/entities/battribute'); -const {formatAttrForSearch} = require('./attribute_formatter.js'); -const BUILTIN_ATTRIBUTES = require('./builtin_attributes.js'); +const {formatAttrForSearch} = require('./attribute_formatter'); +const BUILTIN_ATTRIBUTES = require('./builtin_attributes'); const ATTRIBUTE_TYPES = ['label', 'relation']; diff --git a/src/services/auth.js b/src/services/auth.ts similarity index 65% rename from src/services/auth.js rename to src/services/auth.ts index d212f6676..d54a1ea96 100644 --- a/src/services/auth.js +++ b/src/services/auth.ts @@ -1,16 +1,27 @@ "use strict"; -const etapiTokenService = require('./etapi_tokens.js'); -const log = require('./log'); -const sqlInit = require('./sql_init.js'); -const utils = require('./utils'); -const passwordEncryptionService = require('./encryption/password_encryption'); -const config = require('./config'); -const passwordService = require('./encryption/password'); +import etapiTokenService = require('./etapi_tokens'); +import log = require('./log'); +import sqlInit = require('./sql_init'); +import utils = require('./utils'); +import passwordEncryptionService = require('./encryption/password_encryption'); +import config = require('./config'); +import passwordService = require('./encryption/password'); +import type { NextFunction, Request, Response } from 'express'; const noAuthentication = config.General && config.General.noAuthentication === true; -function checkAuth(req, res, next) { +interface AppRequest extends Request { + headers: { + authorization?: string; + "trilium-cred"?: string; + } + session: { + loggedIn: boolean; + } +} + +function checkAuth(req: AppRequest, res: Response, next: NextFunction) { if (!sqlInit.isDbInitialized()) { res.redirect("setup"); } @@ -24,7 +35,7 @@ function checkAuth(req, res, next) { // for electron things which need network stuff // currently, we're doing that for file upload because handling form data seems to be difficult -function checkApiAuthOrElectron(req, res, next) { +function checkApiAuthOrElectron(req: AppRequest, res: Response, next: NextFunction) { if (!req.session.loggedIn && !utils.isElectron() && !noAuthentication) { reject(req, res, "Logged in session not found"); } @@ -33,7 +44,7 @@ function checkApiAuthOrElectron(req, res, next) { } } -function checkApiAuth(req, res, next) { +function checkApiAuth(req: AppRequest, res: Response, next: NextFunction) { if (!req.session.loggedIn && !noAuthentication) { reject(req, res, "Logged in session not found"); } @@ -42,7 +53,7 @@ function checkApiAuth(req, res, next) { } } -function checkAppInitialized(req, res, next) { +function checkAppInitialized(req: AppRequest, res: Response, next: NextFunction) { if (!sqlInit.isDbInitialized()) { res.redirect("setup"); } @@ -51,7 +62,7 @@ function checkAppInitialized(req, res, next) { } } -function checkPasswordSet(req, res, next) { +function checkPasswordSet(req: AppRequest, res: Response, next: NextFunction) { if (!utils.isElectron() && !passwordService.isPasswordSet()) { res.redirect("set-password"); } else { @@ -59,7 +70,7 @@ function checkPasswordSet(req, res, next) { } } -function checkPasswordNotSet(req, res, next) { +function checkPasswordNotSet(req: AppRequest, res: Response, next: NextFunction) { if (!utils.isElectron() && passwordService.isPasswordSet()) { res.redirect("login"); } else { @@ -67,7 +78,7 @@ function checkPasswordNotSet(req, res, next) { } } -function checkAppNotInitialized(req, res, next) { +function checkAppNotInitialized(req: AppRequest, res: 
Response, next: NextFunction) { if (sqlInit.isDbInitialized()) { reject(req, res, "App already initialized."); } @@ -76,7 +87,7 @@ function checkAppNotInitialized(req, res, next) { } } -function checkEtapiToken(req, res, next) { +function checkEtapiToken(req: AppRequest, res: Response, next: NextFunction) { if (etapiTokenService.isValidAuthHeader(req.headers.authorization)) { next(); } @@ -85,7 +96,7 @@ function checkEtapiToken(req, res, next) { } } -function reject(req, res, message) { +function reject(req: AppRequest, res: Response, message: string) { log.info(`${req.method} ${req.path} rejected with 401 ${message}`); res.setHeader("Content-Type", "text/plain") @@ -93,7 +104,7 @@ function reject(req, res, message) { .send(message); } -function checkCredentials(req, res, next) { +function checkCredentials(req: AppRequest, res: Response, next: NextFunction) { if (!sqlInit.isDbInitialized()) { res.setHeader("Content-Type", "text/plain") .status(400) @@ -109,7 +120,7 @@ function checkCredentials(req, res, next) { } const header = req.headers['trilium-cred'] || ''; - const auth = new Buffer.from(header, 'base64').toString(); + const auth = Buffer.from(header, 'base64').toString(); const colonIndex = auth.indexOf(':'); const password = colonIndex === -1 ? "" : auth.substr(colonIndex + 1); // username is ignored @@ -124,7 +135,7 @@ function checkCredentials(req, res, next) { } } -module.exports = { +export = { checkAuth, checkApiAuth, checkAppInitialized, diff --git a/src/services/backend_script_api.js b/src/services/backend_script_api.js index fc8e80aef..d8687b944 100644 --- a/src/services/backend_script_api.js +++ b/src/services/backend_script_api.js @@ -1,5 +1,5 @@ const log = require('./log'); -const noteService = require('./notes.js'); +const noteService = require('./notes'); const sql = require('./sql'); const utils = require('./utils'); const attributeService = require('./attributes.js'); @@ -10,7 +10,7 @@ const axios = require('axios'); const dayjs = require('dayjs'); const xml2js = require('xml2js'); const cloningService = require('./cloning.js'); -const appInfo = require('./app_info.js'); +const appInfo = require('./app_info'); const searchService = require('./search/services/search.js'); const SearchContext = require('./search/search_context.js'); const becca = require('../becca/becca'); @@ -20,7 +20,7 @@ const specialNotesService = require('./special_notes.js'); const branchService = require('./branches.js'); const exportService = require('./export/zip.js'); const syncMutex = require('./sync_mutex'); -const backupService = require('./backup.js'); +const backupService = require('./backup'); const optionsService = require('./options'); diff --git a/src/services/backup.js b/src/services/backup.ts similarity index 74% rename from src/services/backup.js rename to src/services/backup.ts index 7fa41eb5e..3000c9c9a 100644 --- a/src/services/backup.js +++ b/src/services/backup.ts @@ -1,14 +1,16 @@ "use strict"; -const dateUtils = require('./date_utils'); -const optionService = require('./options'); -const fs = require('fs-extra'); -const dataDir = require('./data_dir'); -const log = require('./log'); -const syncMutexService = require('./sync_mutex'); -const cls = require('./cls'); -const sql = require('./sql'); -const path = require('path'); +import dateUtils = require('./date_utils'); +import optionService = require('./options'); +import fs = require('fs-extra'); +import dataDir = require('./data_dir'); +import log = require('./log'); +import syncMutexService = require('./sync_mutex'); 
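
The converted services keep CommonJS semantics by pairing "export =" with "import ... = require(...)" rather than switching to ES modules, so the not-yet-converted .js callers still receive the same module.exports object from require(). A minimal sketch of the pattern, using hypothetical file names:

// math_helper.ts, a converted service; export = mirrors module.exports = {...}
function double(x: number) {
    return x * 2;
}

export = { double };

// caller.ts; import ... = require() preserves the CommonJS shape,
// so plain JavaScript files can still require('./math_helper') unchanged
import mathHelper = require('./math_helper');

console.log(mathHelper.double(21));
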
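
auth.ts types its middleware against express's Request/Response/NextFunction and narrows the request with a module-local AppRequest interface carrying the session flag the handlers read. Roughly, in simplified form (the real checks also consider Electron and the noAuthentication config):

import type { NextFunction, Request, Response } from 'express';

// local narrowing of the request type; the base express Request is assumed
// not to declare a session property in this setup
interface AppRequest extends Request {
    session: {
        loggedIn: boolean;
    }
}

// simplified: reject with 401 unless the session is marked as logged in
function checkApiAuth(req: AppRequest, res: Response, next: NextFunction) {
    if (!req.session.loggedIn) {
        res.setHeader("Content-Type", "text/plain");
        res.status(401).send("Logged in session not found");
    } else {
        next();
    }
}
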
+import cls = require('./cls'); +import sql = require('./sql'); +import path = require('path'); + +type BackupType = ("daily" | "weekly" | "monthly"); function getExistingBackups() { if (!fs.existsSync(dataDir.BACKUP_DIR)) { @@ -35,13 +37,13 @@ function regularBackup() { }); } -function isBackupEnabled(backupType) { +function isBackupEnabled(backupType: BackupType) { const optionName = `${backupType}BackupEnabled`; return optionService.getOptionBool(optionName); } -function periodBackup(optionName, backupType, periodInSeconds) { +function periodBackup(optionName: string, backupType: BackupType, periodInSeconds: number) { if (!isBackupEnabled(backupType)) { return; } @@ -56,7 +58,7 @@ function periodBackup(optionName, backupType, periodInSeconds) { } } -async function backupNow(name) { +async function backupNow(name: string) { // we don't want to back up DB in the middle of sync with potentially inconsistent DB state return await syncMutexService.doExclusively(async () => { const backupFile = `${dataDir.BACKUP_DIR}/backup-${name}.db`; @@ -73,7 +75,7 @@ if (!fs.existsSync(dataDir.BACKUP_DIR)) { fs.mkdirSync(dataDir.BACKUP_DIR, 0o700); } -module.exports = { +export = { getExistingBackups, backupNow, regularBackup diff --git a/src/services/build.js b/src/services/build.js deleted file mode 100644 index aaae9be51..000000000 --- a/src/services/build.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { buildDate:"2024-03-28T07:11:39+01:00", buildRevision: "399458b52f250b22be22d980a78de0b3390d7521" }; diff --git a/src/services/build.ts b/src/services/build.ts new file mode 100644 index 000000000..b392adba0 --- /dev/null +++ b/src/services/build.ts @@ -0,0 +1 @@ +export = { buildDate:"2024-03-28T07:11:39+01:00", buildRevision: "399458b52f250b22be22d980a78de0b3390d7521" }; diff --git a/src/services/builtin_attributes.js b/src/services/builtin_attributes.ts similarity index 99% rename from src/services/builtin_attributes.js rename to src/services/builtin_attributes.ts index eb21f04f3..6ccabb4e1 100644 --- a/src/services/builtin_attributes.js +++ b/src/services/builtin_attributes.ts @@ -1,4 +1,4 @@ -module.exports = [ +export = [ // label names { type: 'label', name: 'inbox' }, { type: 'label', name: 'disableVersioning' }, diff --git a/src/services/bulk_actions.js b/src/services/bulk_actions.js index 3a274c688..136d1cccf 100644 --- a/src/services/bulk_actions.js +++ b/src/services/bulk_actions.js @@ -1,10 +1,10 @@ const log = require('./log'); -const revisionService = require('./revisions.js'); +const revisionService = require('./revisions'); const becca = require('../becca/becca'); const cloningService = require('./cloning.js'); const branchService = require('./branches.js'); const utils = require('./utils'); -const eraseService = require("./erase.js"); +const eraseService = require("./erase"); const ACTION_HANDLERS = { addLabel: (action, note) => { diff --git a/src/services/consistency_checks.js b/src/services/consistency_checks.js index 5d9ef5104..2e2d6385b 100644 --- a/src/services/consistency_checks.js +++ b/src/services/consistency_checks.js @@ -1,7 +1,7 @@ "use strict"; const sql = require('./sql'); -const sqlInit = require('./sql_init.js'); +const sqlInit = require('./sql_init'); const log = require('./log'); const ws = require('./ws'); const syncMutexService = require('./sync_mutex'); @@ -9,12 +9,12 @@ const cls = require('./cls'); const entityChangesService = require('./entity_changes'); const optionsService = require('./options'); const BBranch = require('../becca/entities/bbranch'); -const 
revisionService = require('./revisions.js'); +const revisionService = require('./revisions'); const becca = require('../becca/becca'); const utils = require('../services/utils'); -const eraseService = require('../services/erase.js'); +const eraseService = require('../services/erase'); const {sanitizeAttributeName} = require('./sanitize_attribute_name'); -const noteTypes = require('../services/note_types.js').getNoteTypeNames(); +const noteTypes = require('../services/note_types').getNoteTypeNames(); class ConsistencyChecks { /** @@ -766,7 +766,7 @@ class ConsistencyChecks { } if (this.reloadNeeded) { - require('../becca/becca_loader.js').reload("consistency checks need becca reload"); + require('../becca/becca_loader').reload("consistency checks need becca reload"); } return !this.unrecoveredConsistencyErrors; diff --git a/src/services/content_hash.js b/src/services/content_hash.js index a42c16503..24fbfcfaa 100644 --- a/src/services/content_hash.js +++ b/src/services/content_hash.js @@ -3,7 +3,7 @@ const sql = require('./sql'); const utils = require('./utils'); const log = require('./log'); -const eraseService = require('./erase.js'); +const eraseService = require('./erase'); function getEntityHashes() { // blob erasure is not synced, we should check before each sync if there's some blob to erase diff --git a/src/services/date_notes.js b/src/services/date_notes.js index 5d74b2f47..628ed886f 100644 --- a/src/services/date_notes.js +++ b/src/services/date_notes.js @@ -1,6 +1,6 @@ "use strict"; -const noteService = require('./notes.js'); +const noteService = require('./notes'); const attributeService = require('./attributes.js'); const dateUtils = require('./date_utils'); const sql = require('./sql'); diff --git a/src/services/date_utils.ts b/src/services/date_utils.ts index 31c58c2e4..88b6ecb69 100644 --- a/src/services/date_utils.ts +++ b/src/services/date_utils.ts @@ -65,7 +65,7 @@ function getDateTimeForFile() { return new Date().toISOString().substr(0, 19).replace(/:/g, ''); } -function validateLocalDateTime(str: string) { +function validateLocalDateTime(str: string | null | undefined) { if (!str) { return; } @@ -80,7 +80,7 @@ function validateLocalDateTime(str: string) { } } -function validateUtcDateTime(str: string) { +function validateUtcDateTime(str: string | undefined) { if (!str) { return; } diff --git a/src/services/encryption/password.ts b/src/services/encryption/password.ts index c14e27700..4d6bf66a3 100644 --- a/src/services/encryption/password.ts +++ b/src/services/encryption/password.ts @@ -78,7 +78,7 @@ function resetPassword() { }; } -module.exports = { +export = { isPasswordSet, changePassword, setPassword, diff --git a/src/services/entity_changes.ts b/src/services/entity_changes.ts index bc75432b0..91f84f834 100644 --- a/src/services/entity_changes.ts +++ b/src/services/entity_changes.ts @@ -45,7 +45,7 @@ function putEntityChange(origEntityChange: EntityChange) { cls.putEntityChange(ec); } -function putNoteReorderingEntityChange(parentNoteId: string, componentId: string) { +function putNoteReorderingEntityChange(parentNoteId: string, componentId?: string) { putEntityChange({ entityName: "note_reordering", entityId: parentNoteId, diff --git a/src/services/erase.js b/src/services/erase.ts similarity index 67% rename from src/services/erase.js rename to src/services/erase.ts index 7ffb30ebc..6e6804f3d 100644 --- a/src/services/erase.js +++ b/src/services/erase.ts @@ -1,13 +1,14 @@ -const sql = require("./sql"); -const revisionService = require("./revisions.js"); -const log 
= require("./log"); -const entityChangesService = require("./entity_changes"); -const optionService = require("./options"); -const dateUtils = require("./date_utils"); -const sqlInit = require("./sql_init.js"); -const cls = require("./cls"); +import sql = require("./sql"); +import revisionService = require("./revisions"); +import log = require("./log"); +import entityChangesService = require("./entity_changes"); +import optionService = require("./options"); +import dateUtils = require("./date_utils"); +import sqlInit = require("./sql_init"); +import cls = require("./cls"); +import { EntityChange } from "./entity_changes_interface"; -function eraseNotes(noteIdsToErase) { +function eraseNotes(noteIdsToErase: string[]) { if (noteIdsToErase.length === 0) { return; } @@ -16,17 +17,17 @@ function eraseNotes(noteIdsToErase) { setEntityChangesAsErased(sql.getManyRows(`SELECT * FROM entity_changes WHERE entityName = 'notes' AND entityId IN (???)`, noteIdsToErase)); // we also need to erase all "dependent" entities of the erased notes - const branchIdsToErase = sql.getManyRows(`SELECT branchId FROM branches WHERE noteId IN (???)`, noteIdsToErase) + const branchIdsToErase = sql.getManyRows<{ branchId: string }>(`SELECT branchId FROM branches WHERE noteId IN (???)`, noteIdsToErase) .map(row => row.branchId); eraseBranches(branchIdsToErase); - const attributeIdsToErase = sql.getManyRows(`SELECT attributeId FROM attributes WHERE noteId IN (???)`, noteIdsToErase) + const attributeIdsToErase = sql.getManyRows<{ attributeId: string }>(`SELECT attributeId FROM attributes WHERE noteId IN (???)`, noteIdsToErase) .map(row => row.attributeId); eraseAttributes(attributeIdsToErase); - const revisionIdsToErase = sql.getManyRows(`SELECT revisionId FROM revisions WHERE noteId IN (???)`, noteIdsToErase) + const revisionIdsToErase = sql.getManyRows<{ revisionId: string }>(`SELECT revisionId FROM revisions WHERE noteId IN (???)`, noteIdsToErase) .map(row => row.revisionId); eraseRevisions(revisionIdsToErase); @@ -34,7 +35,7 @@ function eraseNotes(noteIdsToErase) { log.info(`Erased notes: ${JSON.stringify(noteIdsToErase)}`); } -function setEntityChangesAsErased(entityChanges) { +function setEntityChangesAsErased(entityChanges: EntityChange[]) { for (const ec of entityChanges) { ec.isErased = true; // we're not changing hash here, not sure if good or not @@ -45,7 +46,7 @@ function setEntityChangesAsErased(entityChanges) { } } -function eraseBranches(branchIdsToErase) { +function eraseBranches(branchIdsToErase: string[]) { if (branchIdsToErase.length === 0) { return; } @@ -57,7 +58,7 @@ function eraseBranches(branchIdsToErase) { log.info(`Erased branches: ${JSON.stringify(branchIdsToErase)}`); } -function eraseAttributes(attributeIdsToErase) { +function eraseAttributes(attributeIdsToErase: string[]) { if (attributeIdsToErase.length === 0) { return; } @@ -69,7 +70,7 @@ function eraseAttributes(attributeIdsToErase) { log.info(`Erased attributes: ${JSON.stringify(attributeIdsToErase)}`); } -function eraseAttachments(attachmentIdsToErase) { +function eraseAttachments(attachmentIdsToErase: string[]) { if (attachmentIdsToErase.length === 0) { return; } @@ -81,7 +82,7 @@ function eraseAttachments(attachmentIdsToErase) { log.info(`Erased attachments: ${JSON.stringify(attachmentIdsToErase)}`); } -function eraseRevisions(revisionIdsToErase) { +function eraseRevisions(revisionIdsToErase: string[]) { if (revisionIdsToErase.length === 0) { return; } @@ -116,7 +117,7 @@ function eraseUnusedBlobs() { log.info(`Erased unused blobs: 
${JSON.stringify(unusedBlobIds)}`); } -function eraseDeletedEntities(eraseEntitiesAfterTimeInSeconds = null) { +function eraseDeletedEntities(eraseEntitiesAfterTimeInSeconds: number | null = null) { // this is important also so that the erased entity changes are sent to the connected clients sql.transactional(() => { if (eraseEntitiesAfterTimeInSeconds === null) { @@ -125,41 +126,33 @@ function eraseDeletedEntities(eraseEntitiesAfterTimeInSeconds = null) { const cutoffDate = new Date(Date.now() - eraseEntitiesAfterTimeInSeconds * 1000); - const noteIdsToErase = sql.getColumn("SELECT noteId FROM notes WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); - + const noteIdsToErase = sql.getColumn("SELECT noteId FROM notes WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); eraseNotes(noteIdsToErase); - const branchIdsToErase = sql.getColumn("SELECT branchId FROM branches WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); - + const branchIdsToErase = sql.getColumn("SELECT branchId FROM branches WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); eraseBranches(branchIdsToErase); - const attributeIdsToErase = sql.getColumn("SELECT attributeId FROM attributes WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); - + const attributeIdsToErase = sql.getColumn("SELECT attributeId FROM attributes WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); eraseAttributes(attributeIdsToErase); - const attachmentIdsToErase = sql.getColumn("SELECT attachmentId FROM attachments WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); - + const attachmentIdsToErase = sql.getColumn("SELECT attachmentId FROM attachments WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]); eraseAttachments(attachmentIdsToErase); eraseUnusedBlobs(); }); } -function eraseNotesWithDeleteId(deleteId) { - const noteIdsToErase = sql.getColumn("SELECT noteId FROM notes WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); - +function eraseNotesWithDeleteId(deleteId: string) { + const noteIdsToErase = sql.getColumn("SELECT noteId FROM notes WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); eraseNotes(noteIdsToErase); - const branchIdsToErase = sql.getColumn("SELECT branchId FROM branches WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); - + const branchIdsToErase = sql.getColumn("SELECT branchId FROM branches WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); eraseBranches(branchIdsToErase); - const attributeIdsToErase = sql.getColumn("SELECT attributeId FROM attributes WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); - + const attributeIdsToErase = sql.getColumn("SELECT attributeId FROM attributes WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); eraseAttributes(attributeIdsToErase); - const attachmentIdsToErase = sql.getColumn("SELECT attachmentId FROM attachments WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); - + const attachmentIdsToErase = sql.getColumn("SELECT attachmentId FROM attachments WHERE isDeleted = 1 AND deleteId = ?", [deleteId]); eraseAttachments(attachmentIdsToErase); eraseUnusedBlobs(); @@ -173,13 +166,13 @@ function eraseUnusedAttachmentsNow() { eraseScheduledAttachments(0); } -function eraseScheduledAttachments(eraseUnusedAttachmentsAfterSeconds = null) { +function eraseScheduledAttachments(eraseUnusedAttachmentsAfterSeconds: number 
| null = null) { if (eraseUnusedAttachmentsAfterSeconds === null) { eraseUnusedAttachmentsAfterSeconds = optionService.getOptionInt('eraseUnusedAttachmentsAfterSeconds'); } const cutOffDate = dateUtils.utcDateTimeStr(new Date(Date.now() - (eraseUnusedAttachmentsAfterSeconds * 1000))); - const attachmentIdsToErase = sql.getColumn('SELECT attachmentId FROM attachments WHERE utcDateScheduledForErasureSince < ?', [cutOffDate]); + const attachmentIdsToErase = sql.getColumn('SELECT attachmentId FROM attachments WHERE utcDateScheduledForErasureSince < ?', [cutOffDate]); eraseAttachments(attachmentIdsToErase); } @@ -193,7 +186,7 @@ sqlInit.dbReady.then(() => { setInterval(cls.wrap(() => eraseScheduledAttachments()), 3600 * 1000); }); -module.exports = { +export = { eraseDeletedNotesNow, eraseUnusedAttachmentsNow, eraseNotesWithDeleteId, diff --git a/src/services/etapi_tokens.js b/src/services/etapi_tokens.ts similarity index 83% rename from src/services/etapi_tokens.js rename to src/services/etapi_tokens.ts index 1c4e0338e..2989d3923 100644 --- a/src/services/etapi_tokens.js +++ b/src/services/etapi_tokens.ts @@ -1,17 +1,17 @@ -const becca = require('../becca/becca'); -const utils = require('./utils'); -const BEtapiToken = require('../becca/entities/betapi_token'); -const crypto = require("crypto"); +import becca = require('../becca/becca'); +import utils = require('./utils'); +import BEtapiToken = require('../becca/entities/betapi_token'); +import crypto = require("crypto"); function getTokens() { return becca.getEtapiTokens(); } -function getTokenHash(token) { +function getTokenHash(token: crypto.BinaryLike) { return crypto.createHash('sha256').update(token).digest('base64'); } -function createToken(tokenName) { +function createToken(tokenName: string) { const token = utils.randomSecureToken(32); const tokenHash = getTokenHash(token); @@ -25,7 +25,7 @@ function createToken(tokenName) { }; } -function parseAuthToken(auth) { +function parseAuthToken(auth: string | undefined) { if (!auth) { return null; } @@ -64,7 +64,7 @@ function parseAuthToken(auth) { } } -function isValidAuthHeader(auth) { +function isValidAuthHeader(auth: string | undefined) { const parsed = parseAuthToken(auth); if (!parsed) { @@ -93,7 +93,7 @@ function isValidAuthHeader(auth) { } } -function renameToken(etapiTokenId, newName) { +function renameToken(etapiTokenId: string, newName: string) { const etapiToken = becca.getEtapiToken(etapiTokenId); if (!etapiToken) { @@ -104,7 +104,7 @@ function renameToken(etapiTokenId, newName) { etapiToken.save(); } -function deleteToken(etapiTokenId) { +function deleteToken(etapiTokenId: string) { const etapiToken = becca.getEtapiToken(etapiTokenId); if (!etapiToken) { @@ -114,7 +114,7 @@ function deleteToken(etapiTokenId) { etapiToken.markAsDeletedSimple(); } -module.exports = { +export = { getTokens, createToken, renameToken, diff --git a/src/services/export/zip.js b/src/services/export/zip.js index 8fd8fd896..e0204891d 100644 --- a/src/services/export/zip.js +++ b/src/services/export/zip.js @@ -16,9 +16,9 @@ const archiver = require('archiver'); const log = require('../log'); const TaskContext = require('../task_context'); const ValidationError = require('../../errors/validation_error'); -const NoteMeta = require('../meta/note_meta.js'); -const AttachmentMeta = require('../meta/attachment_meta.js'); -const AttributeMeta = require('../meta/attribute_meta.js'); +const NoteMeta = require('../meta/note_meta'); +const AttachmentMeta = require('../meta/attachment_meta'); +const AttributeMeta = 
require('../meta/attribute_meta'); /** * @param {TaskContext} taskContext diff --git a/src/services/handlers.js b/src/services/handlers.js index 1a9c8e353..a24e28dd6 100644 --- a/src/services/handlers.js +++ b/src/services/handlers.js @@ -1,10 +1,10 @@ const eventService = require('./events'); const scriptService = require('./script.js'); const treeService = require('./tree.js'); -const noteService = require('./notes.js'); +const noteService = require('./notes'); const becca = require('../becca/becca'); const BAttribute = require('../becca/entities/battribute'); -const hiddenSubtreeService = require('./hidden_subtree.js'); +const hiddenSubtreeService = require('./hidden_subtree'); const oneTimeTimer = require('./one_time_timer.js'); function runAttachedRelations(note, relationName, originEntity) { diff --git a/src/services/hidden_subtree.js b/src/services/hidden_subtree.ts similarity index 93% rename from src/services/hidden_subtree.js rename to src/services/hidden_subtree.ts index 6976fcab9..5478aed1c 100644 --- a/src/services/hidden_subtree.js +++ b/src/services/hidden_subtree.ts @@ -1,8 +1,10 @@ -const becca = require('../becca/becca'); -const noteService = require('./notes.js'); -const BAttribute = require('../becca/entities/battribute'); -const log = require('./log'); -const migrationService = require('./migration.js'); +import BAttribute = require("../becca/entities/battribute"); +import { AttributeType, NoteType } from "../becca/entities/rows"; + +import becca = require('../becca/becca'); +import noteService = require('./notes'); +import log = require('./log'); +import migrationService = require('./migration'); const LBTPL_ROOT = "_lbTplRoot"; const LBTPL_BASE = "_lbTplBase"; @@ -13,13 +15,36 @@ const LBTPL_BUILTIN_WIDGET = "_lbTplBuiltinWidget"; const LBTPL_SPACER = "_lbTplSpacer"; const LBTPL_CUSTOM_WIDGET = "_lbTplCustomWidget"; +interface Attribute { + type: AttributeType; + name: string; + isInheritable?: boolean; + value?: string +} + +interface Item { + notePosition?: number; + id: string; + title: string; + type: NoteType; + icon?: string; + attributes?: Attribute[]; + children?: Item[]; + isExpanded?: boolean; + baseSize?: string; + growthFactor?: string; + targetNoteId?: "_backendLog" | "_globalNoteMap"; + builtinWidget?: "bookmarks" | "spacer" | "backInHistoryButton" | "forwardInHistoryButton" | "syncStatus" | "protectedSession" | "todayInJournal" | "calendar"; + command?: "jumpToNote" | "searchNotes" | "createNoteIntoInbox" | "showRecentChanges"; +} + /* * Hidden subtree is generated as a "predictable structure" which means that it avoids generating random IDs to always * produce the same structure. This is needed because it is run on multiple instances in the sync cluster which might produce * duplicate subtrees. This way, all instances will generate the same structure with the same IDs. 
*/ -const HIDDEN_SUBTREE_DEFINITION = { +const HIDDEN_SUBTREE_DEFINITION: Item = { id: '_hidden', title: 'Hidden Notes', type: 'doc', @@ -244,7 +269,7 @@ function checkHiddenSubtree(force = false) { checkHiddenSubtreeRecursively('root', HIDDEN_SUBTREE_DEFINITION); } -function checkHiddenSubtreeRecursively(parentNoteId, item) { +function checkHiddenSubtreeRecursively(parentNoteId: string, item: Item) { if (!item.id || !item.type || !item.title) { throw new Error(`Item does not contain mandatory properties: ${JSON.stringify(item)}`); } @@ -337,7 +362,7 @@ function checkHiddenSubtreeRecursively(parentNoteId, item) { } } -module.exports = { +export = { checkHiddenSubtree, LBTPL_ROOT, LBTPL_BASE, diff --git a/src/services/html_sanitizer.js b/src/services/html_sanitizer.ts similarity index 87% rename from src/services/html_sanitizer.js rename to src/services/html_sanitizer.ts index 99bb567ad..8e82edd3c 100644 --- a/src/services/html_sanitizer.js +++ b/src/services/html_sanitizer.ts @@ -1,18 +1,18 @@ -const sanitizeHtml = require('sanitize-html'); -const sanitizeUrl = require('@braintree/sanitize-url').sanitizeUrl; +import sanitizeHtml = require('sanitize-html'); +import sanitizeUrl = require('@braintree/sanitize-url'); // intended mainly as protection against XSS via import // secondarily, it (partly) protects against "CSS takeover" // sanitize also note titles, label values etc. - there are so many usages which make it difficult // to guarantee all of them are properly handled -function sanitize(dirtyHtml) { +function sanitize(dirtyHtml: string) { if (!dirtyHtml) { return dirtyHtml; } // avoid H1 per https://github.com/zadam/trilium/issues/1552 // demote H1, and if that conflicts with existing H2, demote that, etc - const transformTags = {}; + const transformTags: Record = {}; const lowercasedHtml = dirtyHtml.toLowerCase(); for (let i = 1; i < 6; ++i) { if (lowercasedHtml.includes(` { - return sanitizeUrl(url).trim(); + sanitizeUrl: (url: string) => { + return sanitizeUrl.sanitizeUrl(url).trim(); } }; diff --git a/src/services/image.js b/src/services/image.js index 04add20f8..26c2fbdd3 100644 --- a/src/services/image.js +++ b/src/services/image.js @@ -3,7 +3,7 @@ const becca = require('../becca/becca'); const log = require('./log'); const protectedSessionService = require('./protected_session'); -const noteService = require('./notes.js'); +const noteService = require('./notes'); const optionService = require('./options'); const sql = require('./sql'); const jimp = require('jimp'); @@ -11,7 +11,7 @@ const imageType = require('image-type'); const sanitizeFilename = require('sanitize-filename'); const isSvg = require('is-svg'); const isAnimated = require('is-animated'); -const htmlSanitizer = require('./html_sanitizer.js'); +const htmlSanitizer = require('./html_sanitizer'); async function processImage(uploadBuffer, originalName, shrinkImageSwitch) { const compressImages = optionService.getOptionBool("compressImages"); @@ -154,7 +154,7 @@ function saveImageToAttachment(noteId, uploadBuffer, originalName, shrinkImageSw setTimeout(() => { sql.transactional(() => { const note = becca.getNoteOrThrow(noteId); - const noteService = require('../services/notes.js'); + const noteService = require('../services/notes'); noteService.asyncPostProcessContent(note, note.getContent()); // to mark an unused attachment for deletion }); }, 5000); diff --git a/src/services/import/enex.js b/src/services/import/enex.js index 7cc8973ce..c8379b951 100644 --- a/src/services/import/enex.js +++ b/src/services/import/enex.js 
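
erase.ts now passes an explicit row shape to sql.getManyRows, so the mapped columns are checked at compile time. A rough sketch of how such a generic helper can sit on top of better-sqlite3; the helper here is hypothetical and ignores the (???) placeholder expansion the real sql service performs:

import Database = require("better-sqlite3");

const db = new Database(":memory:");
db.exec("CREATE TABLE branches (branchId TEXT, noteId TEXT)");
db.exec("INSERT INTO branches VALUES ('b1', 'n1')");

// the type parameter only documents and checks the expected row shape;
// nothing is validated at runtime
function getManyRows<T>(query: string, params: unknown[] = []): T[] {
    return db.prepare(query).all(...params) as T[];
}

const branchIdsToErase = getManyRows<{ branchId: string }>(
        "SELECT branchId FROM branches WHERE noteId = ?", ["n1"])
    .map(row => row.branchId); // row.branchId is a checked property access
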
@@ -4,10 +4,10 @@ const {Throttle} = require('stream-throttle'); const log = require('../log'); const utils = require('../utils'); const sql = require('../sql'); -const noteService = require('../notes.js'); +const noteService = require('../notes'); const imageService = require('../image.js'); const protectedSessionService = require('../protected_session'); -const htmlSanitizer = require('../html_sanitizer.js'); +const htmlSanitizer = require('../html_sanitizer'); const {sanitizeAttributeName} = require('../sanitize_attribute_name'); /** diff --git a/src/services/import/markdown.js b/src/services/import/markdown.js index 7cdd6b3d2..745f30847 100644 --- a/src/services/import/markdown.js +++ b/src/services/import/markdown.js @@ -1,7 +1,7 @@ "use strict"; const marked = require("marked"); -const htmlSanitizer = require('../html_sanitizer.js'); +const htmlSanitizer = require('../html_sanitizer'); const importUtils = require('./utils'); function renderToHtml(content, title) { diff --git a/src/services/import/opml.js b/src/services/import/opml.js index a547bf7ad..2b99dbd85 100644 --- a/src/services/import/opml.js +++ b/src/services/import/opml.js @@ -1,9 +1,9 @@ "use strict"; -const noteService = require('../../services/notes.js'); +const noteService = require('../../services/notes'); const parseString = require('xml2js').parseString; const protectedSessionService = require('../protected_session'); -const htmlSanitizer = require('../html_sanitizer.js'); +const htmlSanitizer = require('../html_sanitizer'); /** * @param {TaskContext} taskContext diff --git a/src/services/import/single.js b/src/services/import/single.js index 5e7c92630..ec7aa735d 100644 --- a/src/services/import/single.js +++ b/src/services/import/single.js @@ -1,13 +1,13 @@ "use strict"; -const noteService = require('../../services/notes.js'); +const noteService = require('../../services/notes'); const imageService = require('../../services/image.js'); const protectedSessionService = require('../protected_session'); const markdownService = require('./markdown.js'); const mimeService = require('./mime.js'); const utils = require('../../services/utils'); const importUtils = require('./utils'); -const htmlSanitizer = require('../html_sanitizer.js'); +const htmlSanitizer = require('../html_sanitizer'); function importSingleFile(taskContext, file, parentNote) { const mime = mimeService.getMime(file.originalname) || file.mimetype; diff --git a/src/services/import/zip.js b/src/services/import/zip.js index cf6e870ac..ee7a828d1 100644 --- a/src/services/import/zip.js +++ b/src/services/import/zip.js @@ -3,7 +3,7 @@ const BAttribute = require('../../becca/entities/battribute'); const utils = require('../../services/utils'); const log = require('../../services/log'); -const noteService = require('../../services/notes.js'); +const noteService = require('../../services/notes'); const attributeService = require('../../services/attributes.js'); const BBranch = require('../../becca/entities/bbranch'); const path = require('path'); @@ -11,7 +11,7 @@ const protectedSessionService = require('../protected_session'); const mimeService = require('./mime.js'); const treeService = require('../tree.js'); const yauzl = require("yauzl"); -const htmlSanitizer = require('../html_sanitizer.js'); +const htmlSanitizer = require('../html_sanitizer'); const becca = require('../../becca/becca'); const BAttachment = require('../../becca/entities/battachment'); const markdownService = require('./markdown.js'); diff --git a/src/services/keyboard_actions.js 
b/src/services/keyboard_actions.ts similarity index 96% rename from src/services/keyboard_actions.js rename to src/services/keyboard_actions.ts index 6ad590f07..b2cbaad29 100644 --- a/src/services/keyboard_actions.js +++ b/src/services/keyboard_actions.ts @@ -1,12 +1,21 @@ "use strict"; -const optionService = require('./options'); -const log = require('./log'); -const utils = require('./utils'); +import optionService = require('./options'); +import log = require('./log'); +import utils = require('./utils'); const isMac = process.platform === "darwin"; const isElectron = utils.isElectron(); +interface KeyboardShortcut { + separator?: string; + actionName?: string; + description?: string; + defaultShortcuts?: string[]; + effectiveShortcuts?: string[]; + scope?: string; +} + /** * Scope here means on which element the keyboard shortcuts are attached - this means that for the shortcut to work, * the focus has to be inside the element. @@ -16,7 +25,7 @@ const isElectron = utils.isElectron(); * e.g. CTRL-C in note tree does something a bit different from CTRL-C in the text editor. */ -const DEFAULT_KEYBOARD_ACTIONS = [ +const DEFAULT_KEYBOARD_ACTIONS: KeyboardShortcut[] = [ { separator: "Note navigation" }, @@ -606,15 +615,15 @@ for (const action of DEFAULT_KEYBOARD_ACTIONS) { } function getKeyboardActions() { - const actions = JSON.parse(JSON.stringify(DEFAULT_KEYBOARD_ACTIONS)); + const actions: KeyboardShortcut[] = JSON.parse(JSON.stringify(DEFAULT_KEYBOARD_ACTIONS)); for (const action of actions) { - action.effectiveShortcuts = action.effectiveShortcuts ? action.defaultShortcuts.slice() : []; + action.effectiveShortcuts = action.defaultShortcuts ? action.defaultShortcuts.slice() : []; } for (const option of optionService.getOptions()) { if (option.name.startsWith('keyboardShortcuts')) { - let actionName = option.name.substr(17); + let actionName = option.name.substring(17); actionName = actionName.charAt(0).toLowerCase() + actionName.slice(1); const action = actions.find(ea => ea.actionName === actionName); @@ -636,7 +645,7 @@ function getKeyboardActions() { return actions; } -module.exports = { +export = { DEFAULT_KEYBOARD_ACTIONS, getKeyboardActions }; diff --git a/src/services/meta/attachment_meta.js b/src/services/meta/attachment_meta.js deleted file mode 100644 index 067a4a336..000000000 --- a/src/services/meta/attachment_meta.js +++ /dev/null @@ -1,16 +0,0 @@ -class AttachmentMeta { - /** @type {string} */ - attachmentId; - /** @type {string} */ - title; - /** @type {string} */ - role; - /** @type {string} */ - mime; - /** @type {int} */ - position; - /** @type {string} */ - dataFileName; -} - -module.exports = AttachmentMeta; diff --git a/src/services/meta/attachment_meta.ts b/src/services/meta/attachment_meta.ts new file mode 100644 index 000000000..b84830591 --- /dev/null +++ b/src/services/meta/attachment_meta.ts @@ -0,0 +1,10 @@ +interface AttachmentMeta { + attachmentId: string; + title: string; + role: string; + mime: string; + position: number; + dataFileName: string; +} + +export = AttachmentMeta; diff --git a/src/services/meta/attribute_meta.js b/src/services/meta/attribute_meta.js deleted file mode 100644 index 3d50cb585..000000000 --- a/src/services/meta/attribute_meta.js +++ /dev/null @@ -1,14 +0,0 @@ -class AttributeMeta { - /** @type {string} */ - type; - /** @type {string} */ - name; - /** @type {string} */ - value; - /** @type {boolean} */ - isInheritable; - /** @type {int} */ - position; -} - -module.exports = AttributeMeta; diff --git 
a/src/services/meta/attribute_meta.ts b/src/services/meta/attribute_meta.ts new file mode 100644 index 000000000..319295944 --- /dev/null +++ b/src/services/meta/attribute_meta.ts @@ -0,0 +1,9 @@ +interface AttributeMeta { + type: string; + name: string; + value: string; + isInheritable: boolean; + position: number; +} + +export = AttributeMeta; diff --git a/src/services/meta/note_meta.js b/src/services/meta/note_meta.js deleted file mode 100644 index fd24381d6..000000000 --- a/src/services/meta/note_meta.js +++ /dev/null @@ -1,36 +0,0 @@ -class NoteMeta { - /** @type {string} */ - noteId; - /** @type {string} */ - notePath; - /** @type {boolean} */ - isClone; - /** @type {string} */ - title; - /** @type {int} */ - notePosition; - /** @type {string} */ - prefix; - /** @type {boolean} */ - isExpanded; - /** @type {string} */ - type; - /** @type {string} */ - mime; - /** @type {string} - 'html' or 'markdown', applicable to text notes only */ - format; - /** @type {string} */ - dataFileName; - /** @type {string} */ - dirFileName; - /** @type {boolean} - this file should not be imported (e.g., HTML navigation) */ - noImport = false; - /** @type {AttributeMeta[]} */ - attributes; - /** @type {AttachmentMeta[]} */ - attachments; - /** @type {NoteMeta[]|undefined} */ - children; -} - -module.exports = NoteMeta; diff --git a/src/services/meta/note_meta.ts b/src/services/meta/note_meta.ts new file mode 100644 index 000000000..3492c9c63 --- /dev/null +++ b/src/services/meta/note_meta.ts @@ -0,0 +1,24 @@ +import AttributeMeta = require("./attribute_meta"); + +interface NoteMeta { + noteId: string; + notePath: string; + isClone: boolean; + title: string; + notePosition: number; + prefix: string; + isExpanded: boolean; + type: string; + mime: string; + /** 'html' or 'markdown', applicable to text notes only */ + format: "html" | "markdown"; + dataFileName: string; + dirFileName: string; + /** this file should not be imported (e.g., HTML navigation) */ + noImport: boolean; + attributes: AttributeMeta[]; + attachments: AttributeMeta[]; + children?: NoteMeta[]; +} + +export = NoteMeta; diff --git a/src/services/migration.js b/src/services/migration.ts similarity index 84% rename from src/services/migration.js rename to src/services/migration.ts index 3b6caa251..ca647c539 100644 --- a/src/services/migration.js +++ b/src/services/migration.ts @@ -1,11 +1,18 @@ -const backupService = require('./backup.js'); -const sql = require('./sql'); -const fs = require('fs-extra'); -const log = require('./log'); -const utils = require('./utils'); -const resourceDir = require('./resource_dir'); -const appInfo = require('./app_info.js'); -const cls = require('./cls'); +import backupService = require('./backup'); +import sql = require('./sql'); +import fs = require('fs-extra'); +import log = require('./log'); +import utils = require('./utils'); +import resourceDir = require('./resource_dir'); +import appInfo = require('./app_info'); +import cls = require('./cls'); + +interface MigrationInfo { + dbVersion: number; + name: string; + file: string; + type: string; +} async function migrate() { const currentDbVersion = getDbVersion(); @@ -25,7 +32,12 @@ async function migrate() { : 'before-migration' ); - const migrations = fs.readdirSync(resourceDir.MIGRATIONS_DIR).map(file => { + const migrationFiles = fs.readdirSync(resourceDir.MIGRATIONS_DIR); + if (migrationFiles == null) { + return; + } + + const migrations = migrationFiles.map(file => { const match = file.match(/^([0-9]{4})__([a-zA-Z0-9_ ]+)\.(sql|js)$/); if (!match) { 
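
The meta classes under services/meta carried their types only as JSDoc comments; the rewrite turns each into a plain interface exported with export =, which has no runtime footprint while giving the zip export/import code real type checking. The shape of the change, shown on a hypothetical ExampleMeta:

// before: a data-holder class whose types lived only in JSDoc
// class ExampleMeta {
//     /** @type {string} */
//     id;
//     /** @type {int} */
//     position;
// }
// module.exports = ExampleMeta;

// after: a structural interface with the same shape and no emitted JavaScript
interface ExampleMeta {
    id: string;
    position: number;
}

export = ExampleMeta;

One detail in the new note_meta.ts that may deserve a second look: attachments is typed as AttributeMeta[] and AttachmentMeta is not imported there, which reads like it was intended to be AttachmentMeta[].
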
return null; @@ -45,7 +57,7 @@ async function migrate() { } else { return null; } - }).filter(el => !!el); + }).filter((el): el is MigrationInfo => !!el); migrations.sort((a, b) => a.dbVersion - b.dbVersion); @@ -67,7 +79,7 @@ async function migrate() { WHERE name = ?`, [mig.dbVersion.toString(), "dbVersion"]); log.info(`Migration to version ${mig.dbVersion} has been successful.`); - } catch (e) { + } catch (e: any) { log.error(`error during migration to version ${mig.dbVersion}: ${e.stack}`); log.error("migration failed, crashing hard"); // this is not very user-friendly :-/ @@ -84,7 +96,7 @@ async function migrate() { } } -function executeMigration(mig) { +function executeMigration(mig: MigrationInfo) { if (mig.type === 'sql') { const migrationSql = fs.readFileSync(`${resourceDir.MIGRATIONS_DIR}/${mig.file}`).toString('utf8'); @@ -131,7 +143,7 @@ async function migrateIfNecessary() { } } -module.exports = { +export = { migrateIfNecessary, isDbUpToDate }; diff --git a/src/services/note_types.js b/src/services/note_types.ts similarity index 93% rename from src/services/note_types.js rename to src/services/note_types.ts index f29bf8ab4..54d9d1f44 100644 --- a/src/services/note_types.js +++ b/src/services/note_types.ts @@ -16,7 +16,7 @@ const noteTypes = [ { type: 'contentWidget', defaultMime: '' } ]; -function getDefaultMimeForNoteType(typeName) { +function getDefaultMimeForNoteType(typeName: string) { const typeRec = noteTypes.find(nt => nt.type === typeName); if (!typeRec) { @@ -26,7 +26,7 @@ function getDefaultMimeForNoteType(typeName) { return typeRec.defaultMime; } -module.exports = { +export = { getNoteTypeNames: () => noteTypes.map(nt => nt.type), getDefaultMimeForNoteType }; diff --git a/src/services/notes.js b/src/services/notes.ts similarity index 81% rename from src/services/notes.js rename to src/services/notes.ts index 47fc0ab32..a402a2f16 100644 --- a/src/services/notes.js +++ b/src/services/notes.ts @@ -1,52 +1,62 @@ -const sql = require('./sql'); -const optionService = require('./options'); -const dateUtils = require('./date_utils'); -const entityChangesService = require('./entity_changes'); -const eventService = require('./events'); -const cls = require('../services/cls'); -const protectedSessionService = require('../services/protected_session'); -const log = require('../services/log'); -const utils = require('../services/utils'); -const revisionService = require('./revisions.js'); -const request = require('./request.js'); -const path = require('path'); -const url = require('url'); -const becca = require('../becca/becca'); -const BBranch = require('../becca/entities/bbranch'); -const BNote = require('../becca/entities/bnote'); -const BAttribute = require('../becca/entities/battribute'); -const BAttachment = require('../becca/entities/battachment'); -const dayjs = require("dayjs"); -const htmlSanitizer = require('./html_sanitizer.js'); -const ValidationError = require('../errors/validation_error'); -const noteTypesService = require('./note_types.js'); -const fs = require("fs"); -const ws = require('./ws'); -const html2plaintext = require('html2plaintext') +import sql = require('./sql'); +import optionService = require('./options'); +import dateUtils = require('./date_utils'); +import entityChangesService = require('./entity_changes'); +import eventService = require('./events'); +import cls = require('../services/cls'); +import protectedSessionService = require('../services/protected_session'); +import log = require('../services/log'); +import utils = 
require('../services/utils'); +import revisionService = require('./revisions'); +import request = require('./request'); +import path = require('path'); +import url = require('url'); +import becca = require('../becca/becca'); +import BBranch = require('../becca/entities/bbranch'); +import BNote = require('../becca/entities/bnote'); +import BAttribute = require('../becca/entities/battribute'); +import BAttachment = require('../becca/entities/battachment'); +import dayjs = require("dayjs"); +import htmlSanitizer = require('./html_sanitizer'); +import ValidationError = require('../errors/validation_error'); +import noteTypesService = require('./note_types'); +import fs = require("fs"); +import ws = require('./ws'); +import html2plaintext = require('html2plaintext'); +import { AttachmentRow, AttributeRow, BranchRow, NoteRow, NoteType } from '../becca/entities/rows'; +import TaskContext = require('./task_context'); -/** @param {BNote} parentNote */ -function getNewNotePosition(parentNote) { +interface FoundLink { + name: "imageLink" | "internalLink" | "includeNoteLink" | "relationMapLink", + value: string +} + +interface Attachment { + attachmentId?: string; + title: string; +} + +function getNewNotePosition(parentNote: BNote) { if (parentNote.isLabelTruthy('newNotesOnTop')) { const minNotePos = parentNote.getChildBranches() - .filter(branch => branch.noteId !== '_hidden') // has "always last" note position - .reduce((min, note) => Math.min(min, note.notePosition), 0); + .filter(branch => branch?.noteId !== '_hidden') // has "always last" note position + .reduce((min, note) => Math.min(min, note?.notePosition || 0), 0); return minNotePos - 10; } else { const maxNotePos = parentNote.getChildBranches() - .filter(branch => branch.noteId !== '_hidden') // has "always last" note position - .reduce((max, note) => Math.max(max, note.notePosition), 0); + .filter(branch => branch?.noteId !== '_hidden') // has "always last" note position + .reduce((max, note) => Math.max(max, note?.notePosition || 0), 0); return maxNotePos + 10; } } -/** @param {BNote} note */ -function triggerNoteTitleChanged(note) { +function triggerNoteTitleChanged(note: BNote) { eventService.emit(eventService.NOTE_TITLE_CHANGED, note); } -function deriveMime(type, mime) { +function deriveMime(type: string, mime?: string) { if (!type) { throw new Error(`Note type is a required param`); } @@ -58,11 +68,7 @@ function deriveMime(type, mime) { return noteTypesService.getDefaultMimeForNoteType(type); } -/** - * @param {BNote} parentNote - * @param {BNote} childNote - */ -function copyChildAttributes(parentNote, childNote) { +function copyChildAttributes(parentNote: BNote, childNote: BNote) { for (const attr of parentNote.getAttributes()) { if (attr.name.startsWith("child:")) { const name = attr.name.substr(6); @@ -86,8 +92,7 @@ function copyChildAttributes(parentNote, childNote) { } } -/** @param {BNote} parentNote */ -function getNewNoteTitle(parentNote) { +function getNewNoteTitle(parentNote: BNote) { let title = "new note"; const titleTemplate = parentNote.getLabelValue('titleTemplate'); @@ -101,7 +106,7 @@ function getNewNoteTitle(parentNote) { // - parentNote title = eval(`\`${titleTemplate}\``); - } catch (e) { + } catch (e: any) { log.error(`Title template of note '${parentNote.noteId}' failed with: ${e.message}`); } } @@ -114,7 +119,13 @@ function getNewNoteTitle(parentNote) { return title; } -function getAndValidateParent(params) { +interface GetValidateParams { + parentNoteId: string; + type: string; + ignoreForbiddenParents?: 
boolean; +} + +function getAndValidateParent(params: GetValidateParams) { const parentNote = becca.notes[params.parentNoteId]; if (!parentNote) { @@ -141,24 +152,33 @@ function getAndValidateParent(params) { return parentNote; } -/** - * Following object properties are mandatory: - * - {string} parentNoteId - * - {string} title - * - {*} content - * - {string} type - text, code, file, image, search, book, relationMap, canvas, render - * - * The following are optional (have defaults) - * - {string} mime - value is derived from default mimes for type - * - {boolean} isProtected - default is false - * - {boolean} isExpanded - default is false - * - {string} prefix - default is empty string - * - {int} notePosition - default is the last existing notePosition in a parent + 10 - * - * @param params - * @returns {{note: BNote, branch: BBranch}} - */ -function createNewNote(params) { +interface NoteParams { + /** optionally can force specific noteId */ + noteId?: string; + parentNoteId: string; + templateNoteId?: string; + title: string; + content: string; + type: NoteType; + /** default value is derived from default mimes for type */ + mime?: string; + /** default is false */ + isProtected?: boolean; + /** default is false */ + isExpanded?: boolean; + /** default is empty string */ + prefix?: string; + /** default is the last existing notePosition in a parent + 10 */ + notePosition?: number; + dateCreated?: string; + utcDateCreated?: string; + ignoreForbiddenParents?: boolean; +} + +function createNewNote(params: NoteParams): { + note: BNote; + branch: BBranch; +} { const parentNote = getAndValidateParent(params); if (params.title === null || params.title === undefined) { @@ -209,7 +229,7 @@ function createNewNote(params) { noteId: note.noteId, parentNoteId: params.parentNoteId, notePosition: params.notePosition !== undefined ? 
params.notePosition : getNewNotePosition(parentNote), - prefix: params.prefix, + prefix: params.prefix || "", isExpanded: !!params.isExpanded }).save(); } @@ -253,7 +273,7 @@ function createNewNote(params) { }); } -function createNewNoteWithTarget(target, targetBranchId, params) { +function createNewNoteWithTarget(target: ("into" | "after"), targetBranchId: string, params: NoteParams) { if (!params.type) { const parentNote = becca.notes[params.parentNoteId]; @@ -285,13 +305,7 @@ function createNewNoteWithTarget(target, targetBranchId, params) { } } -/** - * @param {BNote} note - * @param {boolean} protect - * @param {boolean} includingSubTree - * @param {TaskContext} taskContext - */ -function protectNoteRecursively(note, protect, includingSubTree, taskContext) { +function protectNoteRecursively(note: BNote, protect: boolean, includingSubTree: boolean, taskContext: TaskContext) { protectNote(note, protect); taskContext.increaseProgressCount(); @@ -303,11 +317,7 @@ function protectNoteRecursively(note, protect, includingSubTree, taskContext) { } } -/** - * @param {BNote} note - * @param {boolean} protect - */ -function protectNote(note, protect) { +function protectNote(note: BNote, protect: boolean) { if (!protectedSessionService.isProtectedSessionAvailable()) { throw new Error(`Cannot (un)protect note '${note.noteId}' with protect flag '${protect}' without active protected session`); } @@ -345,8 +355,8 @@ function protectNote(note, protect) { } } -function checkImageAttachments(note, content) { - const foundAttachmentIds = new Set(); +function checkImageAttachments(note: BNote, content: string) { + const foundAttachmentIds = new Set<string>(); let match; const imgRegExp = /src="[^"]*api\/attachments\/([a-zA-Z0-9_]+)\/image/g; @@ -362,7 +372,7 @@ function checkImageAttachments(note, content) { const attachments = note.getAttachments(); for (const attachment of attachments) { - const attachmentInContent = foundAttachmentIds.has(attachment.attachmentId); + const attachmentInContent = attachment.attachmentId && foundAttachmentIds.has(attachment.attachmentId); if (attachment.utcDateScheduledForErasureSince && attachmentInContent) { attachment.utcDateScheduledForErasureSince = null; @@ -373,7 +383,7 @@ function checkImageAttachments(note, content) { } } - const existingAttachmentIds = new Set(attachments.map(att => att.attachmentId)); + const existingAttachmentIds = new Set<string | undefined>(attachments.map(att => att.attachmentId)); const unknownAttachmentIds = Array.from(foundAttachmentIds).filter(foundAttId => !existingAttachmentIds.has(foundAttId)); const unknownAttachments = becca.getAttachments(unknownAttachmentIds); @@ -412,7 +422,7 @@ function checkImageAttachments(note, content) { }; } -function findImageLinks(content, foundLinks) { +function findImageLinks(content: string, foundLinks: FoundLink[]) { const re = /src="[^"]*api\/images\/([a-zA-Z0-9_]+)\//g; let match; @@ -428,7 +438,7 @@ function findImageLinks(content, foundLinks) { return content.replace(/src="[^"]*\/api\/images\//g, 'src="api/images/'); } -function findInternalLinks(content, foundLinks) { +function findInternalLinks(content: string, foundLinks: FoundLink[]) { const re = /href="[^"]*#root[a-zA-Z0-9_\/]*\/([a-zA-Z0-9_]+)\/?"/g; let match; @@ -443,7 +453,7 @@ function findInternalLinks(content, foundLinks) { return content.replace(/href="[^"]*#root/g, 'href="#root'); } -function findIncludeNoteLinks(content, foundLinks) { +function findIncludeNoteLinks(content: string, foundLinks: FoundLink[]) { const re = /
]*>/g; let match; @@ -457,7 +467,7 @@ function findIncludeNoteLinks(content, foundLinks) { return content; } -function findRelationMapLinks(content, foundLinks) { +function findRelationMapLinks(content: string, foundLinks: FoundLink[]) { const obj = JSON.parse(content); for (const note of obj.notes) { @@ -468,9 +478,9 @@ function findRelationMapLinks(content, foundLinks) { } } -const imageUrlToAttachmentIdMapping = {}; +const imageUrlToAttachmentIdMapping: Record<string, string> = {}; -async function downloadImage(noteId, imageUrl) { +async function downloadImage(noteId: string, imageUrl: string) { const unescapedUrl = utils.unescapeHtml(imageUrl); try { @@ -493,7 +503,7 @@ async function downloadImage(noteId, imageUrl) { } const parsedUrl = url.parse(unescapedUrl); - const title = path.basename(parsedUrl.pathname); + const title = path.basename(parsedUrl.pathname || ""); const imageService = require('../services/image.js'); const attachment = imageService.saveImageToAttachment(noteId, imageBuffer, title, true, true); @@ -502,21 +512,21 @@ async function downloadImage(noteId, imageUrl) { log.info(`Download of '${imageUrl}' succeeded and was saved as image attachment '${attachment.attachmentId}' of note '${noteId}'`); } - catch (e) { + catch (e: any) { log.error(`Download of '${imageUrl}' for note '${noteId}' failed with error: ${e.message} ${e.stack}`); } } /** url => download promise */ -const downloadImagePromises = {}; +const downloadImagePromises: Record<string, Promise<void>> = {}; -function replaceUrl(content, url, attachment) { +function replaceUrl(content: string, url: string, attachment: Attachment) { const quotedUrl = utils.quoteRegex(url); return content.replace(new RegExp(`\\s+src=[\"']${quotedUrl}[\"']`, "ig"), ` src="api/attachments/${attachment.attachmentId}/image/${encodeURIComponent(attachment.title)}"`); } -function downloadImages(noteId, content) { +function downloadImages(noteId: string, content: string) { const imageRe = /<img[^>]*?\ssrc=['"]([^'">]+)['"]/ig; let imageMatch; @@ -589,6 +599,11 @@ function downloadImages(noteId, content) { const origContent = origNote.getContent(); let updatedContent = origContent; + if (typeof updatedContent !== "string") { + log.error(`Note '${noteId}' has a non-string content, cannot replace image link.`); + return; + } + for (const url in imageUrlToAttachmentIdMapping) { const imageNote = imageNotes.find(note => note.noteId === imageUrlToAttachmentIdMapping[url]); @@ -612,11 +627,7 @@ function downloadImages(noteId, content) { return content; } -/** - * @param {BNote} note - * @param {string} content - */ -function saveAttachments(note, content) { +function saveAttachments(note: BNote, content: string) { const inlineAttachmentRe = /<a[^>]*?\shref=['"]data:([^;'">]+);base64,([^'">]+)['"][^>]*>(.*?)<\/a>/igm; let attachmentMatch; @@ -645,11 +656,7 @@ function saveAttachments(note, content) { return content; } -/** - * @param {BNote} note - * @param {string} content - */ -function saveLinks(note, content) { +function saveLinks(note: BNote, content: string) { if ((note.type !== 'text' && note.type !== 'relationMap') || (note.isProtected && !protectedSessionService.isProtectedSessionAvailable())) { return { @@ -658,7 +665,7 @@ function saveLinks(note, content) { }; } - const foundLinks = []; + const foundLinks: FoundLink[] = []; let forceFrontendReload = false; if (note.type === 'text') { @@ -716,8 +723,7 @@ function saveLinks(note, content) { return { forceFrontendReload, content }; } -/** @param {BNote} note */ -function saveRevisionIfNeeded(note) { +function saveRevisionIfNeeded(note: 
BNote) { // files and images are versioned separately if (note.type === 'file' || note.type === 'image' || note.isLabelTruthy('disableVersioning')) { return; @@ -738,10 +744,10 @@ function saveRevisionIfNeeded(note) { } } -function updateNoteData(noteId, content, attachments = []) { +function updateNoteData(noteId: string, content: string, attachments: AttachmentRow[] = []) { const note = becca.getNote(noteId); - if (!note.isContentAvailable()) { + if (!note || !note.isContentAvailable()) { throw new Error(`Note '${noteId}' is not available for change!`); } @@ -752,10 +758,9 @@ function updateNoteData(noteId, content, attachments = []) { note.setContent(newContent, { forceFrontendReload }); if (attachments?.length > 0) { - /** @var {Object} */ const existingAttachmentsByTitle = utils.toMap(note.getAttachments({includeContentLength: false}), 'title'); - for (const {attachmentId, role, mime, title, content, position} of attachments) { + for (const {attachmentId, role, mime, title, position, content} of attachments) { if (attachmentId || !(title in existingAttachmentsByTitle)) { note.saveAttachment({attachmentId, role, mime, title, content, position}); } else { @@ -763,18 +768,16 @@ function updateNoteData(noteId, content, attachments = []) { existingAttachment.role = role; existingAttachment.mime = mime; existingAttachment.position = position; - existingAttachment.setContent(content, {forceSave: true}); + if (content) { + existingAttachment.setContent(content, {forceSave: true}); + } } } } } -/** - * @param {string} noteId - * @param {TaskContext} taskContext - */ -function undeleteNote(noteId, taskContext) { - const noteRow = sql.getRow("SELECT * FROM notes WHERE noteId = ?", [noteId]); +function undeleteNote(noteId: string, taskContext: TaskContext) { + const noteRow = sql.getRow<NoteRow>("SELECT * FROM notes WHERE noteId = ?", [noteId]); if (!noteRow.isDeleted) { log.error(`Note '${noteId}' is not deleted and thus cannot be undeleted.`); @@ -793,19 +796,14 @@ function undeleteNote(noteId, taskContext) { } } -/** - * @param {string} branchId - * @param {string} deleteId - * @param {TaskContext} taskContext - */ -function undeleteBranch(branchId, deleteId, taskContext) { - const branchRow = sql.getRow("SELECT * FROM branches WHERE branchId = ?", [branchId]) +function undeleteBranch(branchId: string, deleteId: string, taskContext: TaskContext) { + const branchRow = sql.getRow<BranchRow>("SELECT * FROM branches WHERE branchId = ?", [branchId]) if (!branchRow.isDeleted) { return; } - const noteRow = sql.getRow("SELECT * FROM notes WHERE noteId = ?", [branchRow.noteId]); + const noteRow = sql.getRow<NoteRow>("SELECT * FROM notes WHERE noteId = ?", [branchRow.noteId]); if (noteRow.isDeleted && noteRow.deleteId !== deleteId) { return; @@ -818,10 +816,14 @@ function undeleteBranch(branchId, deleteId, taskContext) { if (noteRow.isDeleted && noteRow.deleteId === deleteId) { // becca entity was already created as skeleton in "new Branch()" above const noteEntity = becca.getNote(noteRow.noteId); + if (!noteEntity) { + throw new Error("Unable to find the just restored branch."); + } + noteEntity.updateFromRow(noteRow); noteEntity.save(); - const attributeRows = sql.getRows(` + const attributeRows = sql.getRows<AttributeRow>(` SELECT * FROM attributes WHERE isDeleted = 1 AND deleteId = ? @@ -830,10 +832,11 @@ function undeleteBranch(branchId, deleteId, taskContext) { for (const attributeRow of attributeRows) { // relation might point to a note which hasn't been undeleted yet and would thus throw up + // TODO: skipValidation is not used. 
new BAttribute(attributeRow).save({skipValidation: true}); } - const attachmentRows = sql.getRows(` + const attachmentRows = sql.getRows<AttachmentRow>(` SELECT * FROM attachments WHERE isDeleted = 1 AND deleteId = ? @@ -843,7 +846,7 @@ function undeleteBranch(branchId, deleteId, taskContext) { new BAttachment(attachmentRow).save(); } - const childBranchIds = sql.getColumn(` + const childBranchIds = sql.getColumn<string>(` SELECT branches.branchId FROM branches WHERE branches.isDeleted = 1 @@ -859,8 +862,8 @@ function undeleteBranch(branchId, deleteId, taskContext) { /** * @returns return deleted branchIds of an undeleted parent note */ -function getUndeletedParentBranchIds(noteId, deleteId) { - return sql.getColumn(` +function getUndeletedParentBranchIds(noteId: string, deleteId: string) { + return sql.getColumn<string>(` SELECT branches.branchId FROM branches JOIN notes AS parentNote ON parentNote.noteId = branches.parentNoteId @@ -870,7 +873,7 @@ function getUndeletedParentBranchIds(noteId, deleteId) { AND parentNote.isDeleted = 0`, [noteId, deleteId]); } -function scanForLinks(note, content) { +function scanForLinks(note: BNote, content: string) { if (!note || !['text', 'relationMap'].includes(note.type)) { return; } @@ -884,17 +887,15 @@ function scanForLinks(note, content) { } }); } - catch (e) { + catch (e: any) { log.error(`Could not scan for links note '${note.noteId}': ${e.message} ${e.stack}`); } } /** - * @param {BNote} note - * @param {string} content * Things which have to be executed after updating content, but asynchronously (separate transaction) */ -async function asyncPostProcessContent(note, content) { +async function asyncPostProcessContent(note: BNote, content: string) { if (cls.isMigrationRunning()) { // this is rarely needed for migrations, but can cause trouble by e.g. 
triggering downloads return; @@ -908,7 +909,7 @@ async function asyncPostProcessContent(note, content) { } // all keys should be replaced by the corresponding values -function replaceByMap(str, mapObj) { +function replaceByMap(str: string, mapObj: Record<string, string>) { if (!mapObj) { return str; } @@ -918,7 +919,7 @@ function replaceByMap(str, mapObj) { return str.replace(re, matched => mapObj[matched]); } -function duplicateSubtree(origNoteId, newParentNoteId) { +function duplicateSubtree(origNoteId: string, newParentNoteId: string) { if (origNoteId === 'root') { throw new Error('Duplicating root is not possible'); } @@ -931,6 +932,10 @@ function duplicateSubtree(origNoteId, newParentNoteId) { const noteIdMapping = getNoteIdMapping(origNote); + if (!origBranch) { + throw new Error("Unable to find original branch to duplicate."); + } + const res = duplicateSubtreeInner(origNote, origBranch, newParentNoteId, noteIdMapping); if (!res.note.title.endsWith('(dup)')) { @@ -942,20 +947,25 @@ function duplicateSubtree(origNoteId, newParentNoteId) { return res; } -function duplicateSubtreeWithoutRoot(origNoteId, newNoteId) { +function duplicateSubtreeWithoutRoot(origNoteId: string, newNoteId: string) { if (origNoteId === 'root') { throw new Error('Duplicating root is not possible'); } const origNote = becca.getNote(origNoteId); + if (origNote == null) { + throw new Error("Unable to find note to duplicate."); + } + const noteIdMapping = getNoteIdMapping(origNote); - for (const childBranch of origNote.getChildBranches()) { - duplicateSubtreeInner(childBranch.getNote(), childBranch, newNoteId, noteIdMapping); + if (childBranch) { + duplicateSubtreeInner(childBranch.getNote(), childBranch, newNoteId, noteIdMapping); + } } } -function duplicateSubtreeInner(origNote, origBranch, newParentNoteId, noteIdMapping) { +function duplicateSubtreeInner(origNote: BNote, origBranch: BBranch, newParentNoteId: string, noteIdMapping: Record<string, string>) { if (origNote.isProtected && !protectedSessionService.isProtectedSessionAvailable()) { throw new Error(`Cannot duplicate note '${origNote.noteId}' because it is protected and protected session is not available. Enter protected session and try again.`); } @@ -981,7 +991,7 @@ function duplicateSubtreeInner(origNote, origBranch, newParentNoteId, noteIdMapp let content = origNote.getContent(); - if (['text', 'relationMap', 'search'].includes(origNote.type)) { + if (typeof content === "string" && ['text', 'relationMap', 'search'].includes(origNote.type)) { // fix links in the content content = replaceByMap(content, noteIdMapping); } @@ -1002,11 +1012,14 @@ function duplicateSubtreeInner(origNote, origBranch, newParentNoteId, noteIdMapp } // the relation targets may not be created yet, the mapping is pre-generated - attr.save({skipValidation: true}); + // TODO: This used to be `attr.save({skipValidation: true});`, but skipValidation is in beforeSaving. 
+ attr.save(); } for (const childBranch of origNote.getChildBranches()) { - duplicateSubtreeInner(childBranch.getNote(), childBranch, newNote.noteId, noteIdMapping); + if (childBranch) { + duplicateSubtreeInner(childBranch.getNote(), childBranch, newNote.noteId, noteIdMapping); + } } return newNote; @@ -1031,8 +1044,8 @@ function duplicateSubtreeInner(origNote, origBranch, newParentNoteId, noteIdMapp } } -function getNoteIdMapping(origNote) { - const noteIdMapping = {}; +function getNoteIdMapping(origNote: BNote) { + const noteIdMapping: Record<string, string> = {}; // pregenerate new noteIds since we'll need to fix relation references even for not yet created notes for (const origNoteId of origNote.getDescendantNoteIds()) { @@ -1042,7 +1055,7 @@ function getNoteIdMapping(origNote) { return noteIdMapping; } -module.exports = { +export = { createNewNote, createNewNoteWithTarget, updateNoteData, diff --git a/src/services/options_init.js b/src/services/options_init.js index 5e7ca2f10..d601353af 100644 --- a/src/services/options_init.js +++ b/src/services/options_init.js @@ -1,9 +1,9 @@ const optionService = require('./options'); -const appInfo = require('./app_info.js'); +const appInfo = require('./app_info'); const utils = require('./utils'); const log = require('./log'); const dateUtils = require('./date_utils'); -const keyboardActions = require('./keyboard_actions.js'); +const keyboardActions = require('./keyboard_actions'); function initDocumentOptions() { optionService.createOption('documentId', utils.randomSecureToken(16), false); diff --git a/src/services/port.js b/src/services/port.ts similarity index 71% rename from src/services/port.js rename to src/services/port.ts index c57f22a5d..c37992d42 100644 --- a/src/services/port.js +++ b/src/services/port.ts @@ -1,9 +1,9 @@ -const config = require('./config'); -const utils = require('./utils'); -const env = require('./env'); -const dataDir = require('./data_dir'); +import config = require('./config'); +import utils = require('./utils'); +import env = require('./env'); +import dataDir = require('./data_dir'); -function parseAndValidate(portStr, source) { +function parseAndValidate(portStr: string, source: string) { const portNum = parseInt(portStr); if (isNaN(portNum) || portNum < 0 || portNum >= 65536) { @@ -14,7 +14,7 @@ function parseAndValidate(portStr, source) { return portNum; } -let port; +let port: number; if (process.env.TRILIUM_PORT) { port = parseAndValidate(process.env.TRILIUM_PORT, "environment variable TRILIUM_PORT"); @@ -24,4 +24,4 @@ if (process.env.TRILIUM_PORT) { port = parseAndValidate(config['Network']['port'] || '3000', `Network.port in ${dataDir.CONFIG_INI_PATH}`); } -module.exports = port; +export = port; diff --git a/src/services/request.js b/src/services/request.ts similarity index 73% rename from src/services/request.js rename to src/services/request.ts index 88771439b..ad1e6b5c8 100644 --- a/src/services/request.js +++ b/src/services/request.ts @@ -1,40 +1,81 @@ "use strict"; -const utils = require('./utils'); -const log = require('./log'); -const url = require('url'); -const syncOptions = require('./sync_options.js'); +import utils = require('./utils'); +import log = require('./log'); +import url = require('url'); +import syncOptions = require('./sync_options'); // this service provides abstraction over node's HTTP/HTTPS and electron net.client APIs // this allows supporting system proxy -function exec(opts) { - const client = getClient(opts); +interface ExecOpts { + proxy: "noproxy" | null; + method: string; + url: string; + 
paging?: { + pageCount: number; + pageIndex: number; + requestId: string; + }; + cookieJar?: { + header?: string; + }; + auth?: { + password?: string; + }, + timeout: number; + body: string; +} +interface ClientOpts { + method: string; + url: string; + protocol?: string | null; + host?: string | null; + port?: string | null; + path?: string | null; + timeout?: number; + headers?: Record<string, string | number>; + agent?: any; + proxy?: string | null; +} + +type RequestEvent = ("error" | "response" | "abort"); + +interface Request { + on(event: RequestEvent, cb: (e: any) => void): void; + end(payload?: string): void; +} + +interface Client { + request(opts: ClientOpts): Request; +} + +function exec(opts: ExecOpts) { + const client = getClient(opts); + // hack for cases where electron.net does not work, but we don't want to set proxy if (opts.proxy === 'noproxy') { opts.proxy = null; } - if (!opts.paging) { - opts.paging = { - pageCount: 1, - pageIndex: 0, - requestId: 'n/a' - }; - } + const paging = opts.paging || { + pageCount: 1, + pageIndex: 0, + requestId: 'n/a' + }; const proxyAgent = getProxyAgent(opts); const parsedTargetUrl = url.parse(opts.url); return new Promise((resolve, reject) => { try { - const headers = { + const headers: Record<string, string | number> = { Cookie: (opts.cookieJar && opts.cookieJar.header) || "", - 'Content-Type': opts.paging.pageCount === 1 ? 'application/json' : 'text/plain', - pageCount: opts.paging.pageCount, - pageIndex: opts.paging.pageIndex, - requestId: opts.paging.requestId + 'Content-Type': paging.pageCount === 1 ? 'application/json' : 'text/plain', + pageCount: paging.pageCount, + pageIndex: paging.pageIndex, + requestId: paging.requestId }; if (opts.auth) { @@ -63,9 +104,9 @@ function exec(opts) { } let responseStr = ''; - let chunks = []; + let chunks: Buffer[] = []; - response.on('data', chunk => chunks.push(chunk)); + response.on('data', (chunk: Buffer) => chunks.push(chunk)); response.on('end', () => { // use Buffer instead of string concatenation to avoid implicit decoding for each chunk @@ -77,7 +118,7 @@ function exec(opts) { const jsonObj = responseStr.trim() ? JSON.parse(responseStr) : null; resolve(jsonObj); - } catch (e) { + } catch (e: any) { log.error(`Failed to deserialize sync response: ${responseStr}`); reject(generateError(opts, e.message)); @@ -89,7 +130,7 @@ function exec(opts) { const jsonObj = JSON.parse(responseStr); errorMessage = jsonObj?.message || ''; - } catch (e) { + } catch (e: any) { errorMessage = responseStr.substr(0, Math.min(responseStr.length, 100)); } @@ -108,15 +149,15 @@ function exec(opts) { request.end(payload); } - catch (e) { + catch (e: any) { reject(generateError(opts, e.message)); } }); } -function getImage(imageUrl) { +function getImage(imageUrl: string) { const proxyConf = syncOptions.getSyncProxy(); - const opts = { + const opts: ClientOpts = { method: 'GET', url: imageUrl, proxy: proxyConf !== "noproxy" ? 
proxyConf : null @@ -151,15 +192,15 @@ function getImage(imageUrl) { reject(generateError(opts, `${response.statusCode} ${response.statusMessage}`)); } - const chunks = [] + const chunks: Buffer[] = [] - response.on('data', chunk => chunks.push(chunk)); + response.on('data', (chunk: Buffer) => chunks.push(chunk)); response.on('end', () => resolve(Buffer.concat(chunks))); }); request.end(undefined); } - catch (e) { + catch (e: any) { reject(generateError(opts, e.message)); } }); @@ -167,14 +208,14 @@ function getImage(imageUrl) { const HTTP = 'http:', HTTPS = 'https:'; -function getProxyAgent(opts) { +function getProxyAgent(opts: ClientOpts) { if (!opts.proxy) { return null; } const {protocol} = url.parse(opts.url); - if (![HTTP, HTTPS].includes(protocol)) { + if (!protocol || ![HTTP, HTTPS].includes(protocol)) { return null; } @@ -185,7 +226,7 @@ function getProxyAgent(opts) { return new AgentClass(opts.proxy); } -function getClient(opts) { +function getClient(opts: ClientOpts): Client { // it's not clear how to explicitly configure proxy (as opposed to system proxy), // so in that case, we always use node's modules if (utils.isElectron() && !opts.proxy) { @@ -203,11 +244,14 @@ function getClient(opts) { } } -function generateError(opts, message) { +function generateError(opts: { + method: string; + url: string; +}, message: string) { return new Error(`Request to ${opts.method} ${opts.url} failed, error: ${message}`); } -module.exports = { +export = { exec, getImage }; diff --git a/src/services/revisions.js b/src/services/revisions.ts similarity index 78% rename from src/services/revisions.js rename to src/services/revisions.ts index 7697a35f4..9cd281c13 100644 --- a/src/services/revisions.js +++ b/src/services/revisions.ts @@ -1,14 +1,12 @@ "use strict"; -const log = require('./log'); -const sql = require('./sql'); -const protectedSessionService = require('./protected_session'); -const dateUtils = require('./date_utils'); +import log = require('./log'); +import sql = require('./sql'); +import protectedSessionService = require('./protected_session'); +import dateUtils = require('./date_utils'); +import BNote = require('../becca/entities/bnote'); -/** - * @param {BNote} note - */ -function protectRevisions(note) { +function protectRevisions(note: BNote) { if (!protectedSessionService.isProtectedSessionAvailable()) { throw new Error(`Cannot (un)protect revisions of note '${note.noteId}' without active protected session`); } @@ -18,7 +16,7 @@ function protectRevisions(note) { try { const content = revision.getContent(); - revision.isProtected = note.isProtected; + revision.isProtected = !!note.isProtected; // this will force de/encryption revision.setContent(content, {forceSave: true}); @@ -46,6 +44,6 @@ function protectRevisions(note) { } } -module.exports = { +export = { protectRevisions }; diff --git a/src/services/scheduler.js b/src/services/scheduler.js index 2a712a1ce..d4f40cfd7 100644 --- a/src/services/scheduler.js +++ b/src/services/scheduler.js @@ -1,11 +1,11 @@ const scriptService = require('./script.js'); const cls = require('./cls'); -const sqlInit = require('./sql_init.js'); +const sqlInit = require('./sql_init'); const config = require('./config'); const log = require('./log'); const attributeService = require('../services/attributes.js'); const protectedSessionService = require('../services/protected_session'); -const hiddenSubtreeService = require('./hidden_subtree.js'); +const hiddenSubtreeService = require('./hidden_subtree'); /** * @param {BNote} note diff --git 
a/src/services/search/expressions/note_flat_text.js b/src/services/search/expressions/note_flat_text.js index e9fc2fad8..cf523bfbb 100644 --- a/src/services/search/expressions/note_flat_text.js +++ b/src/services/search/expressions/note_flat_text.js @@ -14,7 +14,7 @@ class NoteFlatTextExp extends Expression { execute(inputNoteSet, executionContext, searchContext) { // has deps on SQL which breaks unit test so needs to be dynamically required - const beccaService = require('../../../becca/becca_service.js'); + const beccaService = require('../../../becca/becca_service'); const resultNoteSet = new NoteSet(); /** diff --git a/src/services/search/search_result.js b/src/services/search/search_result.js index 61f7d86d8..ca3811f8e 100644 --- a/src/services/search/search_result.js +++ b/src/services/search/search_result.js @@ -1,6 +1,6 @@ "use strict"; -const beccaService = require('../../becca/becca_service.js'); +const beccaService = require('../../becca/becca_service'); const becca = require('../../becca/becca'); class SearchResult { diff --git a/src/services/search/services/search.js b/src/services/search/services/search.js index 498d31310..828c624a8 100644 --- a/src/services/search/services/search.js +++ b/src/services/search/services/search.js @@ -7,7 +7,7 @@ const parse = require('./parse.js'); const SearchResult = require('../search_result.js'); const SearchContext = require('../search_context.js'); const becca = require('../../../becca/becca'); -const beccaService = require('../../../becca/becca_service.js'); +const beccaService = require('../../../becca/becca_service'); const utils = require('../../utils'); const log = require('../../log'); const hoistedNoteService = require('../../hoisted_note.js'); diff --git a/src/services/setup.js b/src/services/setup.js index 7434659be..55e559e87 100644 --- a/src/services/setup.js +++ b/src/services/setup.js @@ -1,10 +1,10 @@ const syncService = require('./sync.js'); const log = require('./log'); -const sqlInit = require('./sql_init.js'); +const sqlInit = require('./sql_init'); const optionService = require('./options'); -const syncOptions = require('./sync_options.js'); -const request = require('./request.js'); -const appInfo = require('./app_info.js'); +const syncOptions = require('./sync_options'); +const request = require('./request'); +const appInfo = require('./app_info'); const utils = require('./utils'); const becca = require('../becca/becca'); diff --git a/src/services/special_notes.js b/src/services/special_notes.js index fd229a876..749f43325 100644 --- a/src/services/special_notes.js +++ b/src/services/special_notes.js @@ -1,13 +1,13 @@ const attributeService = require('./attributes.js'); const dateNoteService = require('./date_notes.js'); const becca = require('../becca/becca'); -const noteService = require('./notes.js'); +const noteService = require('./notes'); const dateUtils = require('./date_utils'); const log = require('./log'); const hoistedNoteService = require('./hoisted_note.js'); const searchService = require('./search/services/search.js'); const SearchContext = require('./search/search_context.js'); -const {LBTPL_NOTE_LAUNCHER, LBTPL_CUSTOM_WIDGET, LBTPL_SPACER, LBTPL_SCRIPT} = require('./hidden_subtree.js'); +const {LBTPL_NOTE_LAUNCHER, LBTPL_CUSTOM_WIDGET, LBTPL_SPACER, LBTPL_SCRIPT} = require('./hidden_subtree'); function getInboxNote(date) { const workspaceNote = hoistedNoteService.getWorkspaceNote(); diff --git a/src/services/sql.ts b/src/services/sql.ts index f0a9d7d29..671b136d2 100644 --- a/src/services/sql.ts +++ 
b/src/services/sql.ts @@ -143,8 +143,8 @@ function getRows<T>(query: string, params: Params = []): T[] { return wrap(query, s => s.all(params)) as T[]; } -function getRawRows<T>(query: string, params: Params = []): T[] | null { - return wrap(query, s => s.raw().all(params)) as T[] | null; +function getRawRows<T>(query: string, params: Params = []): T[] { + return (wrap(query, s => s.raw().all(params)) as T[]) || []; } function iterateRows(query: string, params: Params = []) { @@ -259,7 +259,7 @@ function transactional<T>(func: (statement: Statement) => T) { if (entityChangeIds.length > 0) { log.info("Transaction rollback dirtied the becca, forcing reload."); - require('../becca/becca_loader.js').load(); + require('../becca/becca_loader').load(); } // the maxEntityChangeId has been incremented during failed transaction, need to recalculate diff --git a/src/services/sql_init.js b/src/services/sql_init.ts similarity index 84% rename from src/services/sql_init.js rename to src/services/sql_init.ts index da28b35d8..af193d236 100644 --- a/src/services/sql_init.js +++ b/src/services/sql_init.ts @@ -1,17 +1,18 @@ -const log = require('./log'); -const fs = require('fs'); -const resourceDir = require('./resource_dir'); -const sql = require('./sql'); -const utils = require('./utils'); -const optionService = require('./options'); -const port = require('./port.js'); -const BOption = require('../becca/entities/boption'); -const TaskContext = require('./task_context'); -const migrationService = require('./migration.js'); -const cls = require('./cls'); -const config = require('./config'); +import log = require('./log'); +import fs = require('fs'); +import resourceDir = require('./resource_dir'); +import sql = require('./sql'); +import utils = require('./utils'); +import optionService = require('./options'); +import port = require('./port'); +import BOption = require('../becca/entities/boption'); +import TaskContext = require('./task_context'); +import migrationService = require('./migration'); +import cls = require('./cls'); +import config = require('./config'); +import { OptionRow } from '../becca/entities/rows'; -const dbReady = utils.deferred(); +const dbReady = utils.deferred<void>(); cls.init(initDbConnection); @@ -50,7 +51,7 @@ async function createInitialDatabase() { throw new Error("DB is already initialized"); } - const schema = fs.readFileSync(`${resourceDir.DB_INIT_DIR}/schema.sql`, 'UTF-8'); + const schema = fs.readFileSync(`${resourceDir.DB_INIT_DIR}/schema.sql`, "utf-8"); const demoFile = fs.readFileSync(`${resourceDir.DB_INIT_DIR}/demo.zip`); let rootNote; @@ -60,7 +61,7 @@ async function createInitialDatabase() { sql.executeScript(schema); - require('../becca/becca_loader.js').load(); + require('../becca/becca_loader').load(); const BNote = require('../becca/entities/bnote'); const BBranch = require('../becca/entities/bbranch'); @@ -119,14 +120,14 @@ async function createInitialDatabase() { initDbConnection(); } -function createDatabaseForSync(options, syncServerHost = '', syncProxy = '') { +function createDatabaseForSync(options: OptionRow[], syncServerHost = '', syncProxy = '') { log.info("Creating database for sync"); if (isDbInitialized()) { throw new Error("DB is already initialized"); } - const schema = fs.readFileSync(`${resourceDir.DB_INIT_DIR}/schema.sql`, 'UTF-8'); + const schema = fs.readFileSync(`${resourceDir.DB_INIT_DIR}/schema.sql`, "utf8"); sql.transactional(() => { sql.executeScript(schema); @@ -166,10 +167,10 @@ dbReady.then(() => { return; } - setInterval(() => 
require('./backup.js').regularBackup(), 4 * 60 * 60 * 1000); + setInterval(() => require('./backup').regularBackup(), 4 * 60 * 60 * 1000); // kickoff first backup soon after start up - setTimeout(() => require('./backup.js').regularBackup(), 5 * 60 * 1000); + setTimeout(() => require('./backup').regularBackup(), 5 * 60 * 1000); // optimize is usually inexpensive no-op, so running it semi-frequently is not a big deal setTimeout(() => optimize(), 60 * 60 * 1000); @@ -183,7 +184,7 @@ function getDbSize() { log.info(`DB size: ${getDbSize()} KB`); -module.exports = { +export = { dbReady, schemaExists, isDbInitialized, diff --git a/src/services/sync.js b/src/services/sync.js index 4410e571a..347554528 100644 --- a/src/services/sync.js +++ b/src/services/sync.js @@ -8,14 +8,14 @@ const instanceId = require('./instance_id'); const dateUtils = require('./date_utils'); const syncUpdateService = require('./sync_update.js'); const contentHashService = require('./content_hash.js'); -const appInfo = require('./app_info.js'); -const syncOptions = require('./sync_options.js'); +const appInfo = require('./app_info'); +const syncOptions = require('./sync_options'); const syncMutexService = require('./sync_mutex'); const cls = require('./cls'); -const request = require('./request.js'); +const request = require('./request'); const ws = require('./ws'); const entityChangesService = require('./entity_changes'); -const entityConstructor = require('../becca/entity_constructor.js'); +const entityConstructor = require('../becca/entity_constructor'); const becca = require('../becca/becca'); let proxyToggle = true; @@ -399,7 +399,7 @@ function getOutstandingPullCount() { return outstandingPullCount; } -require('../becca/becca_loader.js').beccaLoaded.then(() => { +require('../becca/becca_loader').beccaLoaded.then(() => { setInterval(cls.wrap(sync), 60000); // kickoff initial sync immediately, but should happen after initial consistency checks diff --git a/src/services/sync_options.js b/src/services/sync_options.ts similarity index 90% rename from src/services/sync_options.js rename to src/services/sync_options.ts index a059be973..92aea6c3f 100644 --- a/src/services/sync_options.js +++ b/src/services/sync_options.ts @@ -1,7 +1,7 @@ "use strict"; -const optionService = require('./options'); -const config = require('./config'); +import optionService = require('./options'); +import config = require('./config'); /* * Primary configuration for sync is in the options (document), but we allow to override @@ -10,11 +10,11 @@ const config = require('./config'); * to live sync server. 
*/ -function get(name) { +function get(name: string) { return (config['Sync'] && config['Sync'][name]) || optionService.getOption(name); } -module.exports = { +export = { // env variable is the easiest way to guarantee we won't overwrite prod data during development // after copying prod document/data directory getSyncServerHost: () => process.env.TRILIUM_SYNC_SERVER_HOST || get('syncServerHost'), diff --git a/src/services/sync_update.js b/src/services/sync_update.js index 6b6bbf556..af7b87e11 100644 --- a/src/services/sync_update.js +++ b/src/services/sync_update.js @@ -2,7 +2,7 @@ const sql = require('./sql'); const log = require('./log'); const entityChangesService = require('./entity_changes'); const eventService = require('./events'); -const entityConstructor = require('../becca/entity_constructor.js'); +const entityConstructor = require('../becca/entity_constructor'); const ws = require('./ws'); function updateEntities(entityChanges, instanceId) { diff --git a/src/services/window.js b/src/services/window.js index f855175c5..31bf373c5 100644 --- a/src/services/window.js +++ b/src/services/window.js @@ -1,13 +1,13 @@ const path = require('path'); const url = require("url"); -const port = require('./port.js'); +const port = require('./port'); const optionService = require('./options'); const env = require('./env'); const log = require('./log'); -const sqlInit = require('./sql_init.js'); +const sqlInit = require('./sql_init'); const cls = require('./cls'); -const keyboardActionsService = require('./keyboard_actions.js'); -const {ipcMain} = require('electron'); +const keyboardActionsService = require('./keyboard_actions'); +const { ipcMain } = require('electron'); // Prevent the window being garbage collected /** @type {Electron.BrowserWindow} */ @@ -18,7 +18,7 @@ let setupWindow; async function createExtraWindow(extraWindowHash) { const spellcheckEnabled = optionService.getOptionBool('spellCheckEnabled'); - const {BrowserWindow} = require('electron'); + const { BrowserWindow } = require('electron'); const win = new BrowserWindow({ width: 1000, @@ -55,7 +55,7 @@ async function createMainWindow(app) { const spellcheckEnabled = optionService.getOptionBool('spellCheckEnabled'); - const {BrowserWindow} = require('electron'); // should not be statically imported + const { BrowserWindow } = require('electron'); // should not be statically imported mainWindow = new BrowserWindow({ x: mainWindowState.x, @@ -128,7 +128,7 @@ function getIcon() { } async function createSetupWindow() { - const {BrowserWindow} = require('electron'); // should not be statically imported + const { BrowserWindow } = require('electron'); // should not be statically imported setupWindow = new BrowserWindow({ width: 800, height: 800, @@ -152,7 +152,7 @@ function closeSetupWindow() { } async function registerGlobalShortcuts() { - const {globalShortcut} = require('electron'); + const { globalShortcut } = require('electron'); await sqlInit.dbReady; diff --git a/src/share/content_renderer.js b/src/share/content_renderer.js index f29ca9307..9065fec08 100644 --- a/src/share/content_renderer.js +++ b/src/share/content_renderer.js @@ -1,6 +1,6 @@ const {JSDOM} = require("jsdom"); const shaca = require('./shaca/shaca.js'); -const assetPath = require('../services/asset_path.js'); +const assetPath = require('../services/asset_path'); const shareRoot = require('./share_root.js'); const escapeHtml = require('escape-html'); diff --git a/src/share/routes.js b/src/share/routes.js index 9e1eb5d17..b40048797 100644 --- a/src/share/routes.js 
+++ b/src/share/routes.js @@ -7,8 +7,8 @@ const shaca = require('./shaca/shaca.js'); const shacaLoader = require('./shaca/shaca_loader.js'); const shareRoot = require('./share_root.js'); const contentRenderer = require('./content_renderer.js'); -const assetPath = require('../services/asset_path.js'); -const appPath = require('../services/app_path.js'); +const assetPath = require('../services/asset_path'); +const appPath = require('../services/app_path'); const searchService = require('../services/search/services/search.js'); const SearchContext = require('../services/search/search_context.js'); const log = require('../services/log'); diff --git a/src/tools/generate_document.js b/src/tools/generate_document.js index 844b6e0f3..503fdc6ee 100644 --- a/src/tools/generate_document.js +++ b/src/tools/generate_document.js @@ -3,9 +3,9 @@ * will create 1000 new notes and some clones into the current document.db */ -require('../becca/entity_constructor.js'); -const sqlInit = require('../services/sql_init.js'); -const noteService = require('../services/notes.js'); +require('../becca/entity_constructor'); +const sqlInit = require('../services/sql_init'); +const noteService = require('../services/notes'); const attributeService = require('../services/attributes.js'); const cls = require('../services/cls'); const cloningService = require('../services/cloning.js'); diff --git a/src/types.d.ts b/src/types.d.ts new file mode 100644 index 000000000..c4ccea844 --- /dev/null +++ b/src/types.d.ts @@ -0,0 +1,14 @@ +/* + * This file contains type definitions for libraries that did not have one + * in its library or in `@types/*` packages. + */ + +declare module 'unescape' { + function unescape(str: string, type?: string): string; + export = unescape; +} + +declare module 'html2plaintext' { + function html2plaintext(htmlText: string): string; + export = html2plaintext; +} \ No newline at end of file diff --git a/src/types/unescape.d.ts b/src/types/unescape.d.ts deleted file mode 100644 index 465ebd9e9..000000000 --- a/src/types/unescape.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -declare module 'unescape' { - function unescape(str: string, type?: string): string; - export = unescape; -} \ No newline at end of file diff --git a/tsconfig.json b/tsconfig.json index 0ce95ff39..31aa526d2 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -6,7 +6,9 @@ "outDir": "./dist", "strict": true, "noImplicitAny": true, - "lib": ["ES2022"] + "resolveJsonModule": true, + "lib": ["ES2022"], + "downlevelIteration": true }, "include": [ "./src/**/*.js", @@ -17,6 +19,6 @@ "files": true }, "files": [ - "src/types/unescape.d.ts" + "src/types.d.ts" ] } diff --git a/webpack.config.js b/webpack.config.js index 639642f52..41077c00e 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -1,5 +1,5 @@ const path = require('path'); -const assetPath = require('./src/services/asset_path.js'); +const assetPath = require('./src/services/asset_path'); module.exports = { mode: 'production',