Merge pull request #18 from TriliumNext/feature/typescript_backend_2

Convert backend to TypeScript (19% -> 35%)
Elian Doran, 2024-04-10 19:16:06 +03:00 (committed by GitHub)
commit a7ae16bb39
124 changed files with 1143 additions and 768 deletions
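
The conversion applies one mechanical pattern file by file: CommonJS require()/module.exports pairs become TypeScript's "import x = require(...)" and "export =" forms (which keep CommonJS interop), and previously untyped parameters, rows and caches gain explicit types. A minimal sketch of that pattern on a made-up module; the names below are illustrative only and do not come from this diff:

// Hypothetical module converted in the same style as this PR (sketch only).
// Before, plain CommonJS JavaScript:
//   const path = require('path');
//   function resolveDataFile(name) { return path.join(DATA_DIR, name); }
//   module.exports = { resolveDataFile };

// After, TypeScript with CommonJS-compatible module syntax:
import path = require('path');                    // "import x = require()" keeps require() semantics

const DATA_DIR: string = process.env.TRILIUM_DATA_DIR || ".";   // illustrative default

function resolveDataFile(name: string): string {  // parameters and return values gain explicit types
    return path.join(DATA_DIR, name);
}

export = { resolveDataFile };                     // "export =" replaces module.exports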


@ -1,6 +1,6 @@
#!/usr/bin/env node
const anonymizationService = require('../src/services/anonymization.js');
const anonymizationService = require('../src/services/anonymization');
const fs = require('fs');
const path = require('path');


@ -26,9 +26,9 @@ jq '.version = "'$VERSION'"' package.json|sponge package.json
git add package.json
echo 'module.exports = { buildDate:"'`date --iso-8601=seconds`'", buildRevision: "'`git log -1 --format="%H"`'" };' > src/services/build.js
echo 'export = { buildDate:"'`date --iso-8601=seconds`'", buildRevision: "'`git log -1 --format="%H"`'" };' > src/services/build.ts
git add src/services/build.js
git add src/services/build.ts
TAG=v$VERSION


@ -1,5 +1,5 @@
module.exports = () => {
const beccaLoader = require('../../src/becca/becca_loader.js');
const beccaLoader = require('../../src/becca/becca_loader');
const becca = require('../../src/becca/becca');
const cls = require('../../src/services/cls');
const log = require('../../src/services/log');


@ -10,7 +10,7 @@ if (config.Network.https) {
process.exit(0);
}
const port = require('./src/services/port.js');
const port = require('./src/services/port.ts');
const host = require('./src/services/host.js');
const options = { timeout: 2000 };


@ -1,7 +1,7 @@
'use strict';
const {app, globalShortcut, BrowserWindow} = require('electron');
const sqlInit = require('./src/services/sql_init.js');
const sqlInit = require('./src/services/sql_init');
const appIconService = require('./src/services/app_icon.js');
const windowService = require('./src/services/window.js');
const tray = require('./src/services/tray.js');

package-lock.json (generated, 220 changed lines)

@ -93,8 +93,10 @@
"@types/escape-html": "^1.0.4",
"@types/express": "^4.17.21",
"@types/ini": "^4.1.0",
"@types/jsdom": "^21.1.6",
"@types/mime-types": "^2.1.4",
"@types/node": "^20.11.19",
"@types/sanitize-html": "^2.11.0",
"@types/ws": "^8.5.10",
"cross-env": "7.0.3",
"electron": "25.9.8",
@ -1333,6 +1335,41 @@
"integrity": "sha512-mTehMtc+xtnWBBvqizcqYCktKDBH2WChvx1GU3Sfe4PysFDXiNe+1YwtpVX1MDtCa4NQrSPw2+3HmvXHY3gt1w==",
"dev": true
},
"node_modules/@types/jsdom": {
"version": "21.1.6",
"resolved": "https://registry.npmjs.org/@types/jsdom/-/jsdom-21.1.6.tgz",
"integrity": "sha512-/7kkMsC+/kMs7gAYmmBR9P0vGTnOoLhQhyhQJSlXGI5bzTHp6xdo0TtKWQAsz6pmSAeVqKSbqeyP6hytqr9FDw==",
"dev": true,
"dependencies": {
"@types/node": "*",
"@types/tough-cookie": "*",
"parse5": "^7.0.0"
}
},
"node_modules/@types/jsdom/node_modules/entities": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
"dev": true,
"engines": {
"node": ">=0.12"
},
"funding": {
"url": "https://github.com/fb55/entities?sponsor=1"
}
},
"node_modules/@types/jsdom/node_modules/parse5": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz",
"integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==",
"dev": true,
"dependencies": {
"entities": "^4.4.0"
},
"funding": {
"url": "https://github.com/inikulin/parse5?sponsor=1"
}
},
"node_modules/@types/json-schema": {
"version": "7.0.9",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz",
@ -1439,6 +1476,89 @@
"@types/node": "*"
}
},
"node_modules/@types/sanitize-html": {
"version": "2.11.0",
"resolved": "https://registry.npmjs.org/@types/sanitize-html/-/sanitize-html-2.11.0.tgz",
"integrity": "sha512-7oxPGNQHXLHE48r/r/qjn7q0hlrs3kL7oZnGj0Wf/h9tj/6ibFyRkNbsDxaBBZ4XUZ0Dx5LGCyDJ04ytSofacQ==",
"dev": true,
"dependencies": {
"htmlparser2": "^8.0.0"
}
},
"node_modules/@types/sanitize-html/node_modules/dom-serializer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
"integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
"dev": true,
"dependencies": {
"domelementtype": "^2.3.0",
"domhandler": "^5.0.2",
"entities": "^4.2.0"
},
"funding": {
"url": "https://github.com/cheeriojs/dom-serializer?sponsor=1"
}
},
"node_modules/@types/sanitize-html/node_modules/domhandler": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz",
"integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
"dev": true,
"dependencies": {
"domelementtype": "^2.3.0"
},
"engines": {
"node": ">= 4"
},
"funding": {
"url": "https://github.com/fb55/domhandler?sponsor=1"
}
},
"node_modules/@types/sanitize-html/node_modules/domutils": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz",
"integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==",
"dev": true,
"dependencies": {
"dom-serializer": "^2.0.0",
"domelementtype": "^2.3.0",
"domhandler": "^5.0.3"
},
"funding": {
"url": "https://github.com/fb55/domutils?sponsor=1"
}
},
"node_modules/@types/sanitize-html/node_modules/entities": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
"dev": true,
"engines": {
"node": ">=0.12"
},
"funding": {
"url": "https://github.com/fb55/entities?sponsor=1"
}
},
"node_modules/@types/sanitize-html/node_modules/htmlparser2": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz",
"integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==",
"dev": true,
"funding": [
"https://github.com/fb55/htmlparser2?sponsor=1",
{
"type": "github",
"url": "https://github.com/sponsors/fb55"
}
],
"dependencies": {
"domelementtype": "^2.3.0",
"domhandler": "^5.0.3",
"domutils": "^3.0.1",
"entities": "^4.4.0"
}
},
"node_modules/@types/send": {
"version": "0.17.4",
"resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz",
@ -1460,6 +1580,12 @@
"@types/node": "*"
}
},
"node_modules/@types/tough-cookie": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz",
"integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==",
"dev": true
},
"node_modules/@types/unist": {
"version": "2.0.10",
"resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz",
@ -14124,6 +14250,34 @@
"integrity": "sha512-mTehMtc+xtnWBBvqizcqYCktKDBH2WChvx1GU3Sfe4PysFDXiNe+1YwtpVX1MDtCa4NQrSPw2+3HmvXHY3gt1w==",
"dev": true
},
"@types/jsdom": {
"version": "21.1.6",
"resolved": "https://registry.npmjs.org/@types/jsdom/-/jsdom-21.1.6.tgz",
"integrity": "sha512-/7kkMsC+/kMs7gAYmmBR9P0vGTnOoLhQhyhQJSlXGI5bzTHp6xdo0TtKWQAsz6pmSAeVqKSbqeyP6hytqr9FDw==",
"dev": true,
"requires": {
"@types/node": "*",
"@types/tough-cookie": "*",
"parse5": "^7.0.0"
},
"dependencies": {
"entities": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
"dev": true
},
"parse5": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz",
"integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==",
"dev": true,
"requires": {
"entities": "^4.4.0"
}
}
}
},
"@types/json-schema": {
"version": "7.0.9",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz",
@ -14230,6 +14384,66 @@
"@types/node": "*"
}
},
"@types/sanitize-html": {
"version": "2.11.0",
"resolved": "https://registry.npmjs.org/@types/sanitize-html/-/sanitize-html-2.11.0.tgz",
"integrity": "sha512-7oxPGNQHXLHE48r/r/qjn7q0hlrs3kL7oZnGj0Wf/h9tj/6ibFyRkNbsDxaBBZ4XUZ0Dx5LGCyDJ04ytSofacQ==",
"dev": true,
"requires": {
"htmlparser2": "^8.0.0"
},
"dependencies": {
"dom-serializer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
"integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
"dev": true,
"requires": {
"domelementtype": "^2.3.0",
"domhandler": "^5.0.2",
"entities": "^4.2.0"
}
},
"domhandler": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz",
"integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
"dev": true,
"requires": {
"domelementtype": "^2.3.0"
}
},
"domutils": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz",
"integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==",
"dev": true,
"requires": {
"dom-serializer": "^2.0.0",
"domelementtype": "^2.3.0",
"domhandler": "^5.0.3"
}
},
"entities": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
"dev": true
},
"htmlparser2": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz",
"integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==",
"dev": true,
"requires": {
"domelementtype": "^2.3.0",
"domhandler": "^5.0.3",
"domutils": "^3.0.1",
"entities": "^4.4.0"
}
}
}
},
"@types/send": {
"version": "0.17.4",
"resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz",
@ -14251,6 +14465,12 @@
"@types/node": "*"
}
},
"@types/tough-cookie": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz",
"integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==",
"dev": true
},
"@types/unist": {
"version": "2.0.10",
"resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz",


@ -114,8 +114,10 @@
"@types/escape-html": "^1.0.4",
"@types/express": "^4.17.21",
"@types/ini": "^4.1.0",
"@types/jsdom": "^21.1.6",
"@types/mime-types": "^2.1.4",
"@types/node": "^20.11.19",
"@types/sanitize-html": "^2.11.0",
"@types/ws": "^8.5.10",
"cross-env": "7.0.3",
"electron": "25.9.8",


@ -1,6 +1,6 @@
const anonymizationService = require('./services/anonymization.js');
const sqlInit = require('./services/sql_init.js');
require('./becca/entity_constructor.js');
const anonymizationService = require('./services/anonymization');
const sqlInit = require('./services/sql_init');
require('./becca/entity_constructor');
sqlInit.dbReady.then(async () => {
try {


@ -8,7 +8,7 @@ const sessionParser = require('./routes/session_parser.js');
const utils = require('./services/utils');
require('./services/handlers.js');
require('./becca/becca_loader.js');
require('./becca/becca_loader');
const app = express();
@ -46,7 +46,7 @@ require('./routes/error_handlers.js').register(app);
require('./services/sync.js');
// triggers backup timer
require('./services/backup.js');
require('./services/backup');
// trigger consistency checks timer
require('./services/consistency_checks.js');


@ -21,7 +21,7 @@ interface AttachmentOpts {
* Becca is a backend cache of all notes, branches, and attributes.
* There's a similar frontend cache Froca, and share cache Shaca.
*/
class Becca {
export default class Becca {
loaded!: boolean;
notes!: Record<string, BNote>;
@ -280,4 +280,12 @@ class Becca {
}
}
export = Becca;
/**
* This interface contains the data that is shared across all the objects of a given derived class of {@link AbstractBeccaEntity}.
* For example, all BAttributes will share their content, but all BBranches will have another set of this data.
*/
export interface ConstructorData<T extends AbstractBeccaEntity<T>> {
primaryKeyName: string;
entityName: string;
hashedProperties: (keyof T)[];
}


@ -1,6 +1,6 @@
"use strict";
import Becca = require("./becca-interface");
import Becca from "./becca-interface";
const becca = new Becca();


@ -1,19 +1,21 @@
"use strict";
const sql = require('../services/sql');
const eventService = require('../services/events');
const becca = require('./becca');
const sqlInit = require('../services/sql_init');
const log = require('../services/log');
const BNote = require('./entities/bnote');
const BBranch = require('./entities/bbranch');
const BAttribute = require('./entities/battribute');
const BOption = require('./entities/boption');
const BEtapiToken = require('./entities/betapi_token');
const cls = require('../services/cls');
const entityConstructor = require('../becca/entity_constructor');
import sql = require('../services/sql');
import eventService = require('../services/events');
import becca = require('./becca');
import sqlInit = require('../services/sql_init');
import log = require('../services/log');
import BNote = require('./entities/bnote');
import BBranch = require('./entities/bbranch');
import BAttribute = require('./entities/battribute');
import BOption = require('./entities/boption');
import BEtapiToken = require('./entities/betapi_token');
import cls = require('../services/cls');
import entityConstructor = require('../becca/entity_constructor');
import { AttributeRow, BranchRow, EtapiTokenRow, NoteRow, OptionRow } from './entities/rows';
import AbstractBeccaEntity = require('./entities/abstract_becca_entity');
const beccaLoaded = new Promise((res, rej) => {
const beccaLoaded = new Promise<void>((res, rej) => {
sqlInit.dbReady.then(() => {
cls.init(() => {
load();
@ -38,23 +40,23 @@ function load() {
new BNote().update(row).init();
}
const branchRows = sql.getRawRows(`SELECT branchId, noteId, parentNoteId, prefix, notePosition, isExpanded, utcDateModified FROM branches WHERE isDeleted = 0`);
const branchRows = sql.getRawRows<BranchRow>(`SELECT branchId, noteId, parentNoteId, prefix, notePosition, isExpanded, utcDateModified FROM branches WHERE isDeleted = 0`);
// in-memory sort is faster than in the DB
branchRows.sort((a, b) => a.notePosition - b.notePosition);
branchRows.sort((a, b) => (a.notePosition || 0) - (b.notePosition || 0));
for (const row of branchRows) {
new BBranch().update(row).init();
}
for (const row of sql.getRawRows(`SELECT attributeId, noteId, type, name, value, isInheritable, position, utcDateModified FROM attributes WHERE isDeleted = 0`)) {
for (const row of sql.getRawRows<AttributeRow>(`SELECT attributeId, noteId, type, name, value, isInheritable, position, utcDateModified FROM attributes WHERE isDeleted = 0`)) {
new BAttribute().update(row).init();
}
for (const row of sql.getRows(`SELECT name, value, isSynced, utcDateModified FROM options`)) {
for (const row of sql.getRows<OptionRow>(`SELECT name, value, isSynced, utcDateModified FROM options`)) {
new BOption(row);
}
for (const row of sql.getRows(`SELECT etapiTokenId, name, tokenHash, utcDateCreated, utcDateModified FROM etapi_tokens WHERE isDeleted = 0`)) {
for (const row of sql.getRows<EtapiTokenRow>(`SELECT etapiTokenId, name, tokenHash, utcDateCreated, utcDateModified FROM etapi_tokens WHERE isDeleted = 0`)) {
new BEtapiToken(row);
}
});
@ -68,7 +70,7 @@ function load() {
log.info(`Becca (note cache) load took ${Date.now() - start}ms`);
}
function reload(reason) {
function reload(reason: string) {
load();
require('../services/ws').reloadFrontend(reason || "becca reloaded");
@ -88,7 +90,7 @@ eventService.subscribeBeccaLoader([eventService.ENTITY_CHANGE_SYNCED], ({ entity
if (beccaEntity) {
beccaEntity.updateFromRow(entityRow);
} else {
beccaEntity = new EntityClass();
beccaEntity = new EntityClass() as AbstractBeccaEntity<AbstractBeccaEntity<any>>;
beccaEntity.updateFromRow(entityRow);
beccaEntity.init();
}
@ -112,7 +114,7 @@ eventService.subscribeBeccaLoader(eventService.ENTITY_CHANGED, ({ entityName, en
* @param entityRow - can be a becca entity (change comes from this trilium instance) or just a row (from sync).
* It should be therefore treated as a row.
*/
function postProcessEntityUpdate(entityName, entityRow) {
function postProcessEntityUpdate(entityName: string, entityRow: any) {
if (entityName === 'notes') {
noteUpdated(entityRow);
} else if (entityName === 'branches') {
@ -140,13 +142,13 @@ eventService.subscribeBeccaLoader([eventService.ENTITY_DELETED, eventService.ENT
}
});
function noteDeleted(noteId) {
function noteDeleted(noteId: string) {
delete becca.notes[noteId];
becca.dirtyNoteSetCache();
}
function branchDeleted(branchId) {
function branchDeleted(branchId: string) {
const branch = becca.branches[branchId];
if (!branch) {
@ -173,23 +175,26 @@ function branchDeleted(branchId) {
}
delete becca.childParentToBranch[`${branch.noteId}-${branch.parentNoteId}`];
delete becca.branches[branch.branchId];
}
function noteUpdated(entityRow) {
const note = becca.notes[entityRow.noteId];
if (note) {
// type / mime could have been changed, and they are present in flatTextCache
note.flatTextCache = null;
if (branch.branchId) {
delete becca.branches[branch.branchId];
}
}
function branchUpdated(branchRow) {
function noteUpdated(entityRow: NoteRow) {
const note = becca.notes[entityRow.noteId];
if (note) {
// TODO, this wouldn't have worked in the original implementation since the variable was named __flatTextCache.
// type / mime could have been changed, and they are present in flatTextCache
note.__flatTextCache = null;
}
}
function branchUpdated(branchRow: BranchRow) {
const childNote = becca.notes[branchRow.noteId];
if (childNote) {
childNote.flatTextCache = null;
childNote.__flatTextCache = null;
childNote.sortParents();
// notes in the subtree can get new inherited attributes
@ -204,7 +209,7 @@ function branchUpdated(branchRow) {
}
}
function attributeDeleted(attributeId) {
function attributeDeleted(attributeId: string) {
const attribute = becca.attributes[attributeId];
if (!attribute) {
@ -239,8 +244,7 @@ function attributeDeleted(attributeId) {
}
}
/** @param {BAttribute} attributeRow */
function attributeUpdated(attributeRow) {
function attributeUpdated(attributeRow: BAttribute) {
const attribute = becca.attributes[attributeRow.attributeId];
const note = becca.notes[attributeRow.noteId];
@ -253,7 +257,7 @@ function attributeUpdated(attributeRow) {
}
}
function noteReorderingUpdated(branchIdList) {
function noteReorderingUpdated(branchIdList: number[]) {
const parentNoteIds = new Set();
for (const branchId in branchIdList) {
@ -267,7 +271,7 @@ function noteReorderingUpdated(branchIdList) {
}
}
function etapiTokenDeleted(etapiTokenId) {
function etapiTokenDeleted(etapiTokenId: string) {
delete becca.etapiTokens[etapiTokenId];
}
@ -275,14 +279,14 @@ eventService.subscribeBeccaLoader(eventService.ENTER_PROTECTED_SESSION, () => {
try {
becca.decryptProtectedNotes();
}
catch (e) {
catch (e: any) {
log.error(`Could not decrypt protected notes: ${e.message} ${e.stack}`);
}
});
eventService.subscribeBeccaLoader(eventService.LEAVE_PROTECTED_SESSION, load);
module.exports = {
export = {
load,
reload,
beccaLoaded


@ -1,10 +1,10 @@
"use strict";
const becca = require('./becca');
const cls = require('../services/cls');
const log = require('../services/log');
import becca = require('./becca');
import cls = require('../services/cls');
import log = require('../services/log');
function isNotePathArchived(notePath) {
function isNotePathArchived(notePath: string[]) {
const noteId = notePath[notePath.length - 1];
const note = becca.notes[noteId];
@ -24,9 +24,9 @@ function isNotePathArchived(notePath) {
return false;
}
function getNoteTitle(childNoteId, parentNoteId) {
function getNoteTitle(childNoteId: string, parentNoteId?: string) {
const childNote = becca.notes[childNoteId];
const parentNote = becca.notes[parentNoteId];
const parentNote = parentNoteId ? becca.notes[parentNoteId] : null;
if (!childNote) {
log.info(`Cannot find note '${childNoteId}'`);
@ -40,7 +40,7 @@ function getNoteTitle(childNoteId, parentNoteId) {
return `${(branch && branch.prefix) ? `${branch.prefix} - ` : ''}${title}`;
}
function getNoteTitleArrayForPath(notePathArray) {
function getNoteTitleArrayForPath(notePathArray: string[]) {
if (!notePathArray || !Array.isArray(notePathArray)) {
throw new Error(`${notePathArray} is not an array.`);
}
@ -76,13 +76,13 @@ function getNoteTitleArrayForPath(notePathArray) {
return titles;
}
function getNoteTitleForPath(notePathArray) {
function getNoteTitleForPath(notePathArray: string[]) {
const titles = getNoteTitleArrayForPath(notePathArray);
return titles.join(' / ');
}
module.exports = {
export = {
getNoteTitle,
getNoteTitleForPath,
isNotePathArchived


@ -9,25 +9,15 @@ import cls = require('../../services/cls');
import log = require('../../services/log');
import protectedSessionService = require('../../services/protected_session');
import blobService = require('../../services/blob');
import Becca = require('../becca-interface');
import Becca, { ConstructorData } from '../becca-interface';
let becca: Becca | null = null;
let becca: Becca;
interface ContentOpts {
forceSave?: boolean;
forceFrontendReload?: boolean;
}
/**
* This interface contains the data that is shared across all the objects of a given derived class of {@link AbstractBeccaEntity}.
* For example, all BAttributes will share their content, but all BBranches will have another set of this data.
*/
interface ConstructorData<T extends AbstractBeccaEntity<T>> {
primaryKeyName: string;
entityName: string;
hashedProperties: (keyof T)[];
}
/**
* Base class for all backend entities.
*
@ -35,10 +25,11 @@ interface ConstructorData<T extends AbstractBeccaEntity<T>> {
*/
abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
protected utcDateCreated?: string;
protected utcDateModified?: string;
protected dateCreated?: string;
protected dateModified?: string;
utcDateCreated!: string;
isProtected?: boolean;
isSynced?: boolean;
@ -101,6 +92,12 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
abstract getPojo(): {};
init() {
// Do nothing by default, can be overriden in derived classes.
}
abstract updateFromRow(row: unknown): void;
get isDeleted(): boolean {
// TODO: Not sure why some entities don't implement it.
return false;
@ -109,13 +106,14 @@ abstract class AbstractBeccaEntity<T extends AbstractBeccaEntity<T>> {
/**
* Saves entity - executes SQL, but doesn't commit the transaction on its own
*/
save(): this {
// TODO: opts not used but called a few times, maybe should be used by derived classes or passed to beforeSaving.
save(opts?: {}): this {
const constructorData = (this.constructor as unknown as ConstructorData<T>);
const entityName = constructorData.entityName;
const primaryKeyName = constructorData.primaryKeyName;
const isNewEntity = !(this as any)[primaryKeyName];
this.beforeSaving();
const pojo = this.getPojoToSave();


@ -37,16 +37,16 @@ class BAttachment extends AbstractBeccaEntity<BAttachment> {
noteId?: number;
attachmentId?: string;
/** either noteId or revisionId to which this attachment belongs */
ownerId: string;
role: string;
mime: string;
title: string;
ownerId!: string;
role!: string;
mime!: string;
title!: string;
type?: keyof typeof attachmentRoleToNoteTypeMapping;
position?: number;
blobId?: string;
isProtected?: boolean;
dateModified?: string;
utcDateScheduledForErasureSince?: string;
utcDateScheduledForErasureSince?: string | null;
/** optionally added to the entity */
contentLength?: number;
isDecrypted?: boolean;
@ -54,6 +54,11 @@ class BAttachment extends AbstractBeccaEntity<BAttachment> {
constructor(row: AttachmentRow) {
super();
this.updateFromRow(row);
this.decrypt();
}
updateFromRow(row: AttachmentRow): void {
if (!row.ownerId?.trim()) {
throw new Error("'ownerId' must be given to initialize a Attachment entity");
} else if (!row.role?.trim()) {
@ -76,8 +81,6 @@ class BAttachment extends AbstractBeccaEntity<BAttachment> {
this.utcDateModified = row.utcDateModified;
this.utcDateScheduledForErasureSince = row.utcDateScheduledForErasureSince;
this.contentLength = row.contentLength;
this.decrypt();
}
copy(): BAttachment {
@ -127,8 +130,8 @@ class BAttachment extends AbstractBeccaEntity<BAttachment> {
}
}
getContent(): string | Buffer {
return this._getContent();
getContent(): Buffer {
return this._getContent() as Buffer;
}
setContent(content: string | Buffer, opts: ContentOpts) {
@ -171,6 +174,11 @@ class BAttachment extends AbstractBeccaEntity<BAttachment> {
if (this.role === 'image' && parentNote.type === 'text') {
const origContent = parentNote.getContent();
if (typeof origContent !== "string") {
throw new Error(`Note with ID '${note.noteId}' has a text type but non-string content.`);
}
const oldAttachmentUrl = `api/attachments/${this.attachmentId}/image/`;
const newNoteUrl = `api/images/${note.noteId}/`;


@ -28,7 +28,7 @@ class BAttribute extends AbstractBeccaEntity<BAttribute> {
value!: string;
isInheritable!: boolean;
constructor(row: AttributeRow) {
constructor(row?: AttributeRow) {
super();
if (!row) {
@ -52,7 +52,7 @@ class BAttribute extends AbstractBeccaEntity<BAttribute> {
]);
}
update([attributeId, noteId, type, name, value, isInheritable, position, utcDateModified]: any[]) {
update([attributeId, noteId, type, name, value, isInheritable, position, utcDateModified]: any) {
this.attributeId = attributeId;
this.noteId = noteId;
this.type = type;


@ -1,18 +1,24 @@
import AbstractBeccaEntity = require("./abstract_becca_entity");
import { BlobRow } from "./rows";
// TODO: Why this does not extend the abstract becca?
class BBlob {
class BBlob extends AbstractBeccaEntity<BBlob> {
static get entityName() { return "blobs"; }
static get primaryKeyName() { return "blobId"; }
static get hashedProperties() { return ["blobId", "content"]; }
blobId: string;
content: string | Buffer;
contentLength: number;
dateModified: string;
utcDateModified: string;
blobId!: string;
content!: string | Buffer;
contentLength!: number;
dateModified!: string;
utcDateModified!: string;
constructor(row: BlobRow) {
super();
this.updateFromRow(row);
}
updateFromRow(row: BlobRow): void {
this.blobId = row.blobId;
this.content = row.content;
this.contentLength = row.contentLength;


@ -30,7 +30,7 @@ class BBranch extends AbstractBeccaEntity<BBranch> {
isExpanded!: boolean;
utcDateModified?: string;
constructor(row: BranchRow) {
constructor(row?: BranchRow) {
super();
if (!row) {


@ -19,12 +19,12 @@ class BEtapiToken extends AbstractBeccaEntity<BEtapiToken> {
static get primaryKeyName() { return "etapiTokenId"; }
static get hashedProperties() { return ["etapiTokenId", "name", "tokenHash", "utcDateCreated", "utcDateModified", "isDeleted"]; }
etapiTokenId!: string;
etapiTokenId?: string;
name!: string;
tokenHash!: string;
private _isDeleted!: boolean;
private _isDeleted?: boolean;
constructor(row: EtapiTokenRow) {
constructor(row?: EtapiTokenRow) {
super();
if (!row) {
@ -36,7 +36,7 @@ class BEtapiToken extends AbstractBeccaEntity<BEtapiToken> {
}
get isDeleted() {
return this._isDeleted;
return !!this._isDeleted;
}
updateFromRow(row: EtapiTokenRow) {
@ -74,7 +74,9 @@ class BEtapiToken extends AbstractBeccaEntity<BEtapiToken> {
super.beforeSaving();
this.becca.etapiTokens[this.etapiTokenId] = this;
if (this.etapiTokenId) {
this.becca.etapiTokens[this.etapiTokenId] = this;
}
}
}


@ -70,7 +70,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
children!: BNote[];
targetRelations!: BAttribute[];
private __flatTextCache!: string | null;
__flatTextCache!: string | null;
private __attributeCache!: BAttribute[] | null;
private __inheritableAttributeCache!: BAttribute[] | null;
@ -86,7 +86,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
/** number of note revisions for this note */
private revisionCount!: number | null;
constructor(row: Partial<NoteRow>) {
constructor(row?: Partial<NoteRow>) {
super();
if (!row) {
@ -216,9 +216,8 @@ class BNote extends AbstractBeccaEntity<BNote> {
* - changes in the note metadata or title should not trigger note content sync (so we keep separate utcDateModified and entity changes records)
* - but to the user note content and title changes are one and the same - single dateModified (so all changes must go through Note and content is not a separate entity)
*/
// TODO: original declaration was (string | Buffer), but everywhere it's used as a string.
getContent(): string {
return this._getContent() as string;
getContent() {
return this._getContent();
}
/**
@ -226,7 +225,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
getJsonContent(): {} | null {
const content = this.getContent();
if (!content || !content.trim()) {
if (typeof content !== "string" || !content || !content.trim()) {
return null;
}
@ -243,7 +242,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
}
}
setContent(content: string, opts: ContentOpts = {}) {
setContent(content: Buffer | string, opts: ContentOpts = {}) {
this._setContent(content, opts);
eventService.emit(eventService.NOTE_CONTENT_CHANGE, { entity: this });
@ -661,7 +660,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
* @param name - relation name to filter
* @returns all note's relations (attributes with type relation), including inherited ones
*/
getRelations(name: string): BAttribute[] {
getRelations(name?: string): BAttribute[] {
return this.getAttributes(RELATION, name);
}
@ -1510,6 +1509,10 @@ class BNote extends AbstractBeccaEntity<BNote> {
const oldNoteUrl = `api/images/${this.noteId}/`;
const newAttachmentUrl = `api/attachments/${attachment.attachmentId}/image/`;
if (typeof parentContent !== "string") {
throw new Error("Unable to convert image note into attachment because parent note does not have a string content.");
}
const fixedContent = utils.replaceAll(parentContent, oldNoteUrl, newAttachmentUrl);
parentNote.setContent(fixedContent);
@ -1611,7 +1614,7 @@ class BNote extends AbstractBeccaEntity<BNote> {
revisionAttachment.ownerId = revision.revisionId;
revisionAttachment.setContent(noteAttachment.getContent(), { forceSave: true });
if (this.type === 'text') {
if (this.type === 'text' && typeof noteContent === "string") {
// content is rewritten to point to the revision attachments
noteContent = noteContent.replaceAll(`attachments/${noteAttachment.attachmentId}`,
`attachments/${revisionAttachment.attachmentId}`);
@ -1654,7 +1657,10 @@ class BNote extends AbstractBeccaEntity<BNote> {
position
});
content = content || "";
if (!content) {
throw new Error("Attempted to save an attachment with no content.");
}
attachment.setContent(content, {forceSave: true});
return attachment;


@ -16,10 +16,12 @@ class BOption extends AbstractBeccaEntity<BOption> {
value!: string;
isSynced!: boolean;
constructor(row: OptionRow) {
constructor(row?: OptionRow) {
super();
this.updateFromRow(row);
if (row) {
this.updateFromRow(row);
}
this.becca.options[this.name] = this;
}


@ -11,14 +11,19 @@ import AbstractBeccaEntity = require('./abstract_becca_entity');
class BRecentNote extends AbstractBeccaEntity<BRecentNote> {
static get entityName() { return "recent_notes"; }
static get primaryKeyName() { return "noteId"; }
static get hashedProperties() { return ["noteId", "notePath"]; }
noteId: string;
notePath: string;
utcDateCreated: string;
noteId!: string;
notePath!: string;
utcDateCreated!: string;
constructor(row: RecentNoteRow) {
super();
this.updateFromRow(row);
}
updateFromRow(row: RecentNoteRow): void {
this.noteId = row.noteId;
this.notePath = row.notePath;
this.utcDateCreated = row.utcDateCreated || dateUtils.utcNowDateTime();


@ -29,22 +29,30 @@ class BRevision extends AbstractBeccaEntity<BRevision> {
"utcDateLastEdited", "utcDateCreated", "utcDateModified", "blobId"]; }
revisionId?: string;
noteId: string;
type: string;
mime: string;
isProtected: boolean;
title: string;
noteId!: string;
type!: string;
mime!: string;
isProtected!: boolean;
title!: string;
blobId?: string;
dateLastEdited?: string;
dateCreated: string;
dateCreated!: string;
utcDateLastEdited?: string;
utcDateCreated: string;
utcDateCreated!: string;
contentLength?: number;
content?: string;
constructor(row: RevisionRow, titleDecrypted = false) {
super();
this.updateFromRow(row);
if (this.isProtected && !titleDecrypted) {
const decryptedTitle = protectedSessionService.isProtectedSessionAvailable() ? protectedSessionService.decryptString(this.title) : null;
this.title = decryptedTitle || "[protected]";
}
}
updateFromRow(row: RevisionRow) {
this.revisionId = row.revisionId;
this.noteId = row.noteId;
this.type = row.type;
@ -58,11 +66,6 @@ class BRevision extends AbstractBeccaEntity<BRevision> {
this.utcDateCreated = row.utcDateCreated;
this.utcDateModified = row.utcDateModified;
this.contentLength = row.contentLength;
if (this.isProtected && !titleDecrypted) {
const decryptedTitle = protectedSessionService.isProtectedSessionAvailable() ? protectedSessionService.decryptString(this.title) : null;
this.title = decryptedTitle || "[protected]";
}
}
getNote() {


@ -5,7 +5,7 @@ export interface AttachmentRow {
ownerId?: string;
role: string;
mime: string;
title?: string;
title: string;
position?: number;
blobId?: string;
isProtected?: boolean;
@ -13,7 +13,7 @@ export interface AttachmentRow {
utcDateModified?: string;
utcDateScheduledForErasureSince?: string;
contentLength?: number;
content?: string;
content?: Buffer | string;
}
export interface RevisionRow {
@ -46,12 +46,12 @@ export interface OptionRow {
}
export interface EtapiTokenRow {
etapiTokenId: string;
etapiTokenId?: string;
name: string;
tokenHash: string;
utcDateCreated?: string;
utcDateModified?: string;
isDeleted: boolean;
isDeleted?: boolean;
}
export interface BlobRow {
@ -69,9 +69,9 @@ export interface AttributeRow {
noteId: string;
type: AttributeType;
name: string;
position: number;
value: string;
isInheritable: boolean;
position?: number;
value?: string;
isInheritable?: boolean;
utcDateModified?: string;
}
@ -79,9 +79,10 @@ export interface BranchRow {
branchId?: string;
noteId: string;
parentNoteId: string;
prefix: string | null;
notePosition: number;
isExpanded: boolean;
prefix?: string | null;
notePosition: number | null;
isExpanded?: boolean;
isDeleted?: boolean;
utcDateModified?: string;
}
@ -94,13 +95,15 @@ export type NoteType = ("file" | "image" | "search" | "noteMap" | "launcher" | "
export interface NoteRow {
noteId: string;
deleteId: string;
title: string;
type: NoteType;
mime: string;
isProtected: boolean;
isDeleted: boolean;
blobId: string;
dateCreated: string;
dateModified: string;
utcDateCreated: string;
utcDateModified: string;
}
}


@ -1,33 +0,0 @@
const BAttachment = require('./entities/battachment');
const BAttribute = require('./entities/battribute');
const BBlob = require('./entities/bblob');
const BBranch = require('./entities/bbranch');
const BEtapiToken = require('./entities/betapi_token');
const BNote = require('./entities/bnote');
const BOption = require('./entities/boption');
const BRecentNote = require('./entities/brecent_note');
const BRevision = require('./entities/brevision');
const ENTITY_NAME_TO_ENTITY = {
"attachments": BAttachment,
"attributes": BAttribute,
"blobs": BBlob,
"branches": BBranch,
"etapi_tokens": BEtapiToken,
"notes": BNote,
"options": BOption,
"recent_notes": BRecentNote,
"revisions": BRevision
};
function getEntityFromEntityName(entityName) {
if (!(entityName in ENTITY_NAME_TO_ENTITY)) {
throw new Error(`Entity for table '${entityName}' not found!`);
}
return ENTITY_NAME_TO_ENTITY[entityName];
}
module.exports = {
getEntityFromEntityName
};


@ -0,0 +1,37 @@
import { ConstructorData } from './becca-interface';
import AbstractBeccaEntity = require('./entities/abstract_becca_entity');
import BAttachment = require('./entities/battachment');
import BAttribute = require('./entities/battribute');
import BBlob = require('./entities/bblob');
import BBranch = require('./entities/bbranch');
import BEtapiToken = require('./entities/betapi_token');
import BNote = require('./entities/bnote');
import BOption = require('./entities/boption');
import BRecentNote = require('./entities/brecent_note');
import BRevision = require('./entities/brevision');
type EntityClass = new (row?: any) => AbstractBeccaEntity<any>;
const ENTITY_NAME_TO_ENTITY: Record<string, ConstructorData<any> & EntityClass> = {
"attachments": BAttachment,
"attributes": BAttribute,
"blobs": BBlob,
"branches": BBranch,
"etapi_tokens": BEtapiToken,
"notes": BNote,
"options": BOption,
"recent_notes": BRecentNote,
"revisions": BRevision
};
function getEntityFromEntityName(entityName: keyof typeof ENTITY_NAME_TO_ENTITY) {
if (!(entityName in ENTITY_NAME_TO_ENTITY)) {
throw new Error(`Entity for table '${entityName}' not found!`);
}
return ENTITY_NAME_TO_ENTITY[entityName];
}
export = {
getEntityFromEntityName
};
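
The rewritten entity_constructor above types each map value as both a constructor and ConstructorData, so callers such as becca_loader can read an entity's static metadata and instantiate it from a synced row through a single lookup. A hedged usage sketch; the row literal passed to updateFromRow is invented for illustration:

import entityConstructor = require('../becca/entity_constructor');

// Resolve the entity class for a synced table name, mirroring the
// ENTITY_CHANGE_SYNCED handler in becca_loader.
const EntityClass = entityConstructor.getEntityFromEntityName('notes');

// Static metadata declared via ConstructorData<T> is available on the class itself.
console.log(EntityClass.entityName, EntityClass.primaryKeyName);

// Constructors now accept an optional row, so sync handling can create an empty
// instance and fill it from the incoming row afterwards.
const entity = new EntityClass();
entity.updateFromRow({ noteId: "illustrativeId", title: "Example note" });  // row shape assumed
entity.init();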


@ -1,8 +1,9 @@
const becca = require('./becca');
const log = require('../services/log');
const beccaService = require('./becca_service');
const dateUtils = require('../services/date_utils');
const {JSDOM} = require("jsdom");
import becca = require('./becca');
import log = require('../services/log');
import beccaService = require('./becca_service');
import dateUtils = require('../services/date_utils');
import { JSDOM } from "jsdom";
import BNote = require('./entities/bnote');
const DEBUG = false;
@ -32,21 +33,25 @@ const IGNORED_ATTR_NAMES = [
"pageurl",
];
function filterUrlValue(value) {
interface DateLimits {
minDate: string;
minExcludedDate: string;
maxExcludedDate: string;
maxDate: string;
}
function filterUrlValue(value: string) {
return value
.replace(/https?:\/\//ig, "")
.replace(/www.js\./ig, "")
.replace(/(\.net|\.com|\.org|\.info|\.edu)/ig, "");
}
/**
* @param {BNote} note
*/
function buildRewardMap(note) {
function buildRewardMap(note: BNote) {
// Need to use Map instead of object: https://github.com/zadam/trilium/issues/1895
const map = new Map();
function addToRewardMap(text, rewardFactor) {
function addToRewardMap(text: string | undefined | null, rewardFactor: number) {
if (!text) {
return;
}
@ -126,7 +131,7 @@ function buildRewardMap(note) {
const content = note.getContent();
const dom = new JSDOM(content);
function addHeadingsToRewardMap(elName, rewardFactor) {
const addHeadingsToRewardMap = (elName: string, rewardFactor: number) => {
for (const el of dom.window.document.querySelectorAll(elName)) {
addToRewardMap(el.textContent, rewardFactor);
}
@ -146,9 +151,9 @@ function buildRewardMap(note) {
return map;
}
const mimeCache = {};
const mimeCache: Record<string, string> = {};
function trimMime(mime) {
function trimMime(mime: string) {
if (!mime || mime === 'text/html') {
return;
}
@ -173,7 +178,7 @@ function trimMime(mime) {
return mimeCache[mime];
}
function buildDateLimits(baseNote) {
function buildDateLimits(baseNote: BNote): DateLimits {
const dateCreatedTs = dateUtils.parseDateTime(baseNote.utcDateCreated).getTime();
return {
@ -193,7 +198,7 @@ const WORD_BLACKLIST = [
"than", "then", "and", "either", "or", "neither", "nor", "both", "also"
];
function splitToWords(text) {
function splitToWords(text: string) {
let words = wordCache.get(text);
if (!words) {
@ -221,13 +226,13 @@ function splitToWords(text) {
* includeNoteLink and imageLink relation mean that notes are clearly related, but so clearly
* that it doesn't actually need to be shown to the user.
*/
function hasConnectingRelation(sourceNote, targetNote) {
function hasConnectingRelation(sourceNote: BNote, targetNote: BNote) {
return sourceNote.getAttributes().find(attr => attr.type === 'relation'
&& ['includenotelink', 'imagelink'].includes(attr.name)
&& attr.value === targetNote.noteId);
}
async function findSimilarNotes(noteId) {
async function findSimilarNotes(noteId: string) {
const results = [];
let i = 0;
@ -237,23 +242,23 @@ async function findSimilarNotes(noteId) {
return [];
}
let dateLimits;
let dateLimits: DateLimits;
try {
dateLimits = buildDateLimits(baseNote);
}
catch (e) {
catch (e: any) {
throw new Error(`Date limits failed with ${e.message}, entity: ${JSON.stringify(baseNote.getPojo())}`);
}
const rewardMap = buildRewardMap(baseNote);
let ancestorRewardCache = {};
let ancestorRewardCache: Record<string, number> = {};
const ancestorNoteIds = new Set(baseNote.getAncestors().map(note => note.noteId));
ancestorNoteIds.add(baseNote.noteId);
let displayRewards = false;
function gatherRewards(text, factor = 1) {
function gatherRewards(text?: string | null, factor: number = 1) {
if (!text) {
return 0;
}
@ -279,7 +284,7 @@ async function findSimilarNotes(noteId) {
return counter;
}
function gatherAncestorRewards(note) {
function gatherAncestorRewards(note?: BNote) {
if (!note || ancestorNoteIds.has(note.noteId)) {
return 0;
}
@ -311,7 +316,7 @@ async function findSimilarNotes(noteId) {
return ancestorRewardCache[note.noteId];
}
function computeScore(candidateNote) {
function computeScore(candidateNote: BNote) {
let score = gatherRewards(trimMime(candidateNote.mime))
+ gatherAncestorRewards(candidateNote);
@ -451,11 +456,11 @@ async function findSimilarNotes(noteId) {
* see https://snyk.io/blog/nodejs-how-even-quick-async-functions-can-block-the-event-loop-starve-io/
*/
function setImmediatePromise() {
return new Promise((resolve) => {
return new Promise<void>((resolve) => {
setTimeout(() => resolve(), 0);
});
}
module.exports = {
export = {
findSimilarNotes
};


@ -6,4 +6,4 @@ class ValidationError {
}
}
module.exports = ValidationError;
export = ValidationError;


@ -1,4 +1,4 @@
const appInfo = require('../services/app_info.js');
const appInfo = require('../services/app_info');
const eu = require('./etapi_utils');
function register(router) {


@ -1,7 +1,7 @@
const becca = require('../becca/becca');
const eu = require('./etapi_utils');
const passwordEncryptionService = require('../services/encryption/password_encryption');
const etapiTokenService = require('../services/etapi_tokens.js');
const etapiTokenService = require('../services/etapi_tokens');
function register(router, loginMiddleware) {
eu.NOT_AUTHENTICATED_ROUTE(router, 'post', '/etapi/auth/login', loginMiddleware, (req, res, next) => {


@ -1,5 +1,5 @@
const eu = require('./etapi_utils');
const backupService = require('../services/backup.js');
const backupService = require('../services/backup');
function register(router) {
eu.route(router, 'put', '/etapi/backup/:backupName', async (req, res, next) => {


@ -2,7 +2,7 @@ const cls = require('../services/cls');
const sql = require('../services/sql');
const log = require('../services/log');
const becca = require('../becca/becca');
const etapiTokenService = require('../services/etapi_tokens.js');
const etapiTokenService = require('../services/etapi_tokens');
const config = require('../services/config');
const GENERIC_CODE = "GENERIC";


@ -2,7 +2,7 @@ const becca = require('../becca/becca');
const utils = require('../services/utils');
const eu = require('./etapi_utils');
const mappers = require('./mappers.js');
const noteService = require('../services/notes.js');
const noteService = require('../services/notes');
const TaskContext = require('../services/task_context');
const v = require('./validators.js');
const searchService = require('../services/search/services/search.js');


@ -1,4 +1,4 @@
const noteTypeService = require('../services/note_types.js');
const noteTypeService = require('../services/note_types');
const dateUtils = require('../services/date_utils');
function mandatory(obj) {


@ -1,6 +1,6 @@
"use strict";
const appInfo = require('../../services/app_info.js');
const appInfo = require('../../services/app_info');
function getAppInfo() {
return appInfo;


@ -1,6 +1,6 @@
"use strict";
const beccaService = require('../../becca/becca_service.js');
const beccaService = require('../../becca/becca_service');
const searchService = require('../../services/search/services/search.js');
const log = require('../../services/log');
const utils = require('../../services/utils');


@ -4,7 +4,7 @@ const sql = require('../../services/sql');
const utils = require('../../services/utils');
const entityChangesService = require('../../services/entity_changes');
const treeService = require('../../services/tree.js');
const eraseService = require('../../services/erase.js');
const eraseService = require('../../services/erase');
const becca = require('../../becca/becca');
const TaskContext = require('../../services/task_context');
const branchService = require('../../services/branches.js');


@ -2,17 +2,17 @@
const attributeService = require('../../services/attributes.js');
const cloneService = require('../../services/cloning.js');
const noteService = require('../../services/notes.js');
const noteService = require('../../services/notes');
const dateNoteService = require('../../services/date_notes.js');
const dateUtils = require('../../services/date_utils');
const imageService = require('../../services/image.js');
const appInfo = require('../../services/app_info.js');
const appInfo = require('../../services/app_info');
const ws = require('../../services/ws');
const log = require('../../services/log');
const utils = require('../../services/utils');
const path = require('path');
const htmlSanitizer = require('../../services/html_sanitizer.js');
const {formatAttrForSearch} = require('../../services/attribute_formatter.js');
const htmlSanitizer = require('../../services/html_sanitizer');
const {formatAttrForSearch} = require('../../services/attribute_formatter');
const jsdom = require("jsdom");
const { JSDOM } = jsdom;


@ -2,8 +2,8 @@
const sql = require('../../services/sql');
const log = require('../../services/log');
const backupService = require('../../services/backup.js');
const anonymizationService = require('../../services/anonymization.js');
const backupService = require('../../services/backup');
const anonymizationService = require('../../services/anonymization');
const consistencyChecksService = require('../../services/consistency_checks.js');
function getExistingBackups() {


@ -1,4 +1,4 @@
const etapiTokenService = require('../../services/etapi_tokens.js');
const etapiTokenService = require('../../services/etapi_tokens');
function getTokens() {
const tokens = etapiTokenService.getTokens();


@ -3,7 +3,7 @@
const protectedSessionService = require('../../services/protected_session');
const utils = require('../../services/utils');
const log = require('../../services/log');
const noteService = require('../../services/notes.js');
const noteService = require('../../services/notes');
const tmp = require('tmp');
const fs = require('fs');
const { Readable } = require('stream');


@ -7,7 +7,7 @@ const singleImportService = require('../../services/import/single.js');
const cls = require('../../services/cls');
const path = require('path');
const becca = require('../../becca/becca');
const beccaLoader = require('../../becca/becca_loader.js');
const beccaLoader = require('../../becca/becca_loader');
const log = require('../../services/log');
const TaskContext = require('../../services/task_context');
const ValidationError = require('../../errors/validation_error');


@ -1,6 +1,6 @@
"use strict";
const keyboardActions = require('../../services/keyboard_actions.js');
const keyboardActions = require('../../services/keyboard_actions');
const becca = require('../../becca/becca');
function getKeyboardActions() {


@ -6,12 +6,12 @@ const dateUtils = require('../../services/date_utils');
const instanceId = require('../../services/instance_id');
const passwordEncryptionService = require('../../services/encryption/password_encryption');
const protectedSessionService = require('../../services/protected_session');
const appInfo = require('../../services/app_info.js');
const appInfo = require('../../services/app_info');
const eventService = require('../../services/events');
const sqlInit = require('../../services/sql_init.js');
const sqlInit = require('../../services/sql_init');
const sql = require('../../services/sql');
const ws = require('../../services/ws');
const etapiTokenService = require('../../services/etapi_tokens.js');
const etapiTokenService = require('../../services/etapi_tokens');
function loginSync(req) {
if (!sqlInit.schemaExists()) {


@ -1,7 +1,7 @@
"use strict";
const noteService = require('../../services/notes.js');
const eraseService = require('../../services/erase.js');
const noteService = require('../../services/notes');
const eraseService = require('../../services/erase');
const treeService = require('../../services/tree.js');
const sql = require('../../services/sql');
const utils = require('../../services/utils');


@ -2,7 +2,7 @@
const sql = require('../../services/sql');
const protectedSessionService = require('../../services/protected_session');
const noteService = require('../../services/notes.js');
const noteService = require('../../services/notes');
const becca = require('../../becca/becca');
function getRecentChanges(req) {


@ -1,14 +1,14 @@
"use strict";
const beccaService = require('../../becca/becca_service.js');
const revisionService = require('../../services/revisions.js');
const beccaService = require('../../becca/becca_service');
const revisionService = require('../../services/revisions');
const utils = require('../../services/utils');
const sql = require('../../services/sql');
const cls = require('../../services/cls');
const path = require('path');
const becca = require('../../becca/becca');
const blobService = require('../../services/blob');
const eraseService = require("../../services/erase.js");
const eraseService = require("../../services/erase");
function getRevisionBlob(req) {
const preview = req.query.preview === 'true';


@ -5,7 +5,7 @@ const SearchContext = require('../../services/search/search_context.js');
const searchService = require('../../services/search/services/search.js');
const bulkActionService = require('../../services/bulk_actions.js');
const cls = require('../../services/cls');
const {formatAttrForSearch} = require('../../services/attribute_formatter.js');
const {formatAttrForSearch} = require('../../services/attribute_formatter');
const ValidationError = require('../../errors/validation_error');
function searchFromNote(req) {


@ -2,7 +2,7 @@
const imageType = require('image-type');
const imageService = require('../../services/image.js');
const noteService = require('../../services/notes.js');
const noteService = require('../../services/notes');
const {sanitizeAttributeName} = require('../../services/sanitize_attribute_name');
const specialNotesService = require('../../services/special_notes.js');


@ -1,9 +1,9 @@
"use strict";
const sqlInit = require('../../services/sql_init.js');
const sqlInit = require('../../services/sql_init');
const setupService = require('../../services/setup.js');
const log = require('../../services/log');
const appInfo = require('../../services/app_info.js');
const appInfo = require('../../services/app_info');
function getStatus() {
return {


@ -1,6 +1,6 @@
"use strict";
const similarityService = require('../../becca/similarity.js');
const similarityService = require('../../becca/similarity');
const becca = require('../../becca/becca');
async function getSimilarNotes(req) {


@ -4,11 +4,11 @@ const syncService = require('../../services/sync.js');
const syncUpdateService = require('../../services/sync_update.js');
const entityChangesService = require('../../services/entity_changes');
const sql = require('../../services/sql');
const sqlInit = require('../../services/sql_init.js');
const sqlInit = require('../../services/sql_init');
const optionService = require('../../services/options');
const contentHashService = require('../../services/content_hash.js');
const log = require('../../services/log');
const syncOptions = require('../../services/sync_options.js');
const syncOptions = require('../../services/sync_options');
const utils = require('../../services/utils');
const ws = require('../../services/ws');


@ -1,4 +1,4 @@
const assetPath = require('../services/asset_path.js');
const assetPath = require('../services/asset_path');
const path = require("path");
const express = require("express");
const env = require('../services/env');


@ -9,8 +9,8 @@ const env = require('../services/env');
const utils = require('../services/utils');
const protectedSessionService = require('../services/protected_session');
const packageJson = require('../../package.json');
const assetPath = require('../services/asset_path.js');
const appPath = require('../services/app_path.js');
const assetPath = require('../services/asset_path');
const appPath = require('../services/app_path');
function index(req, res) {
const options = optionService.getOptionMap();


@ -5,8 +5,8 @@ const optionService = require('../services/options');
const myScryptService = require('../services/encryption/my_scrypt');
const log = require('../services/log');
const passwordService = require('../services/encryption/password');
const assetPath = require('../services/asset_path.js');
const appPath = require('../services/app_path.js');
const assetPath = require('../services/asset_path');
const appPath = require('../services/app_path');
const ValidationError = require('../errors/validation_error');
function loginPage(req, res) {


@ -5,7 +5,7 @@ const multer = require('multer');
const log = require('../services/log');
const express = require('express');
const router = express.Router();
const auth = require('../services/auth.js');
const auth = require('../services/auth');
const cls = require('../services/cls');
const sql = require('../services/sql');
const entityChangesService = require('../services/entity_changes');
@ -28,14 +28,14 @@ const branchesApiRoute = require('./api/branches.js');
const attachmentsApiRoute = require('./api/attachments.js');
const autocompleteApiRoute = require('./api/autocomplete.js');
const cloningApiRoute = require('./api/cloning.js');
const revisionsApiRoute = require('./api/revisions.js');
const revisionsApiRoute = require('./api/revisions');
const recentChangesApiRoute = require('./api/recent_changes.js');
const optionsApiRoute = require('./api/options.js');
const passwordApiRoute = require('./api/password');
const syncApiRoute = require('./api/sync.js');
const loginApiRoute = require('./api/login.js');
const recentNotesRoute = require('./api/recent_notes.js');
const appInfoRoute = require('./api/app_info.js');
const appInfoRoute = require('./api/app_info');
const exportRoute = require('./api/export.js');
const importRoute = require('./api/import.js');
const setupApiRoute = require('./api/setup.js');
@ -56,20 +56,20 @@ const keysRoute = require('./api/keys.js');
const backendLogRoute = require('./api/backend_log.js');
const statsRoute = require('./api/stats.js');
const fontsRoute = require('./api/fonts.js');
const etapiTokensApiRoutes = require('./api/etapi_tokens.js');
const etapiTokensApiRoutes = require('./api/etapi_tokens');
const relationMapApiRoute = require('./api/relation-map');
const otherRoute = require('./api/other.js');
const shareRoutes = require('../share/routes.js');
const etapiAuthRoutes = require('../etapi/auth.js');
const etapiAppInfoRoutes = require('../etapi/app_info.js');
const etapiAppInfoRoutes = require('../etapi/app_info');
const etapiAttachmentRoutes = require('../etapi/attachments.js');
const etapiAttributeRoutes = require('../etapi/attributes.js');
const etapiBranchRoutes = require('../etapi/branches.js');
const etapiNoteRoutes = require('../etapi/notes.js');
const etapiSpecialNoteRoutes = require('../etapi/special_notes.js');
const etapiSpecRoute = require('../etapi/spec.js');
const etapiBackupRoute = require('../etapi/backup.js');
const etapiBackupRoute = require('../etapi/backup');
const csrfMiddleware = csurf({
cookie: true,


@ -1,10 +1,10 @@
"use strict";
const sqlInit = require('../services/sql_init.js');
const sqlInit = require('../services/sql_init');
const setupService = require('../services/setup.js');
const utils = require('../services/utils');
const assetPath = require('../services/asset_path.js');
const appPath = require('../services/app_path.js');
const assetPath = require('../services/asset_path');
const appPath = require('../services/app_path');
function setupPage(req, res) {
if (sqlInit.isDbInitialized()) {

View File

@ -1,10 +1,10 @@
const BUILTIN_ATTRIBUTES = require('./builtin_attributes.js');
const fs = require("fs-extra");
const dataDir = require('./data_dir');
const dateUtils = require('./date_utils');
const Database = require("better-sqlite3");
const sql = require('./sql');
const path = require("path");
import BUILTIN_ATTRIBUTES = require('./builtin_attributes');
import fs = require("fs-extra");
import dataDir = require('./data_dir');
import dateUtils = require('./date_utils');
import Database = require("better-sqlite3");
import sql = require('./sql');
import path = require("path");
function getFullAnonymizationScript() {
// we want to delete all non-builtin attributes because they can contain sensitive names and values
@ -48,7 +48,7 @@ function getLightAnonymizationScript() {
AND value != '';`;
}
async function createAnonymizedCopy(type) {
async function createAnonymizedCopy(type: "full" | "light") {
if (!['full', 'light'].includes(type)) {
throw new Error(`Unrecognized anonymization type '${type}'`);
}

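The createAnonymizedCopy signature above pairs a TypeScript literal union with the pre-existing runtime check; a minimal sketch of why both remain useful (untyped JS callers bypass the compile-time check). The call site is illustrative, inside an async context within this module:

// type-checked TypeScript callers can only pass the two allowed values
await createAnonymizedCopy("light");

// an untyped JS caller could still pass anything; the runtime includes()
// guard above then throws: Unrecognized anonymization type 'bogus'
// await createAnonymizedCopy("bogus");
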
View File

@ -1,12 +1,12 @@
"use strict";
const path = require('path');
const {ELECTRON_APP_ROOT_DIR} = require('./resource_dir');
const log = require('./log');
const os = require('os');
const fs = require('fs');
const config = require('./config');
const utils = require('./utils');
import path = require('path');
import resourceDir = require('./resource_dir');
import log = require('./log');
import os = require('os');
import fs = require('fs');
import config = require('./config');
import utils = require('./utils');
const template = `[Desktop Entry]
Type=Application
@ -28,7 +28,7 @@ function installLocalAppIcon() {
return;
}
if (!fs.existsSync(path.resolve(ELECTRON_APP_ROOT_DIR, "trilium-portable.sh"))) {
if (!fs.existsSync(path.resolve(resourceDir.ELECTRON_APP_ROOT_DIR, "trilium-portable.sh"))) {
// simple heuristic to detect ".tar.xz" linux build (i.e., not flatpak, not debian)
// only in such case it's necessary to create an icon
return;
@ -56,16 +56,16 @@ function installLocalAppIcon() {
function getDesktopFileContent() {
return template
.replace("#APP_ROOT_DIR#", escapePath(ELECTRON_APP_ROOT_DIR))
.replace("#APP_ROOT_DIR#", escapePath(resourceDir.ELECTRON_APP_ROOT_DIR))
.replace("#EXE_PATH#", escapePath(getExePath()));
}
function escapePath(path) {
function escapePath(path: string) {
return path.replace(/ /g, "\\ ");
}
function getExePath() {
return path.resolve(ELECTRON_APP_ROOT_DIR, 'trilium');
return path.resolve(resourceDir.ELECTRON_APP_ROOT_DIR, 'trilium');
}
module.exports = {

View File

@ -1,21 +1,21 @@
"use strict";
const build = require('./build.js');
const packageJson = require('../../package.json');
const {TRILIUM_DATA_DIR} = require('./data_dir');
import build = require('./build');
import packageJson = require('../../package.json');
import dataDir = require('./data_dir');
const APP_DB_VERSION = 228;
const SYNC_VERSION = 32;
const CLIPPER_PROTOCOL_VERSION = "1.0";
module.exports = {
export = {
appVersion: packageJson.version,
dbVersion: APP_DB_VERSION,
nodeVersion: process.version,
syncVersion: SYNC_VERSION,
buildDate: build.buildDate,
buildRevision: build.buildRevision,
dataDirectory: TRILIUM_DATA_DIR,
dataDirectory: dataDir.TRILIUM_DATA_DIR,
clipperProtocolVersion: CLIPPER_PROTOCOL_VERSION,
utcDateTime: new Date().toISOString() // for timezone inference
};

View File

@ -1,6 +0,0 @@
const assetPath = require('./asset_path.js');
const env = require('./env');
module.exports = env.isDev()
? assetPath + "/app"
: assetPath + "/app-dist";

6
src/services/app_path.ts Normal file
View File

@ -0,0 +1,6 @@
import assetPath = require('./asset_path');
import env = require('./env');
export = env.isDev()
? assetPath + "/app"
: assetPath + "/app-dist";

View File

@ -1,3 +0,0 @@
const packageJson = require('../../package.json');
module.exports = `assets/v${packageJson.version}`;

View File

@ -0,0 +1,3 @@
import packageJson = require('../../package.json');
export = `assets/v${packageJson.version}`;

View File

@ -1,6 +1,8 @@
"use strict";
function formatAttrForSearch(attr, searchWithValue) {
import BAttribute = require("../becca/entities/battribute");
function formatAttrForSearch(attr: BAttribute, searchWithValue: string) {
let searchStr = '';
if (attr.type === 'label') {
@ -27,7 +29,7 @@ function formatAttrForSearch(attr, searchWithValue) {
return searchStr;
}
function formatValue(val) {
function formatValue(val: string) {
if (!/[^\w]/.test(val)) {
return val;
}
@ -45,6 +47,6 @@ function formatValue(val) {
}
}
module.exports = {
export = {
formatAttrForSearch
};

View File

@ -4,8 +4,8 @@ const searchService = require('./search/services/search.js');
const sql = require('./sql');
const becca = require('../becca/becca');
const BAttribute = require('../becca/entities/battribute');
const {formatAttrForSearch} = require('./attribute_formatter.js');
const BUILTIN_ATTRIBUTES = require('./builtin_attributes.js');
const {formatAttrForSearch} = require('./attribute_formatter');
const BUILTIN_ATTRIBUTES = require('./builtin_attributes');
const ATTRIBUTE_TYPES = ['label', 'relation'];

View File

@ -1,16 +1,27 @@
"use strict";
const etapiTokenService = require('./etapi_tokens.js');
const log = require('./log');
const sqlInit = require('./sql_init.js');
const utils = require('./utils');
const passwordEncryptionService = require('./encryption/password_encryption');
const config = require('./config');
const passwordService = require('./encryption/password');
import etapiTokenService = require('./etapi_tokens');
import log = require('./log');
import sqlInit = require('./sql_init');
import utils = require('./utils');
import passwordEncryptionService = require('./encryption/password_encryption');
import config = require('./config');
import passwordService = require('./encryption/password');
import type { NextFunction, Request, Response } from 'express';
const noAuthentication = config.General && config.General.noAuthentication === true;
function checkAuth(req, res, next) {
interface AppRequest extends Request {
headers: {
authorization?: string;
"trilium-cred"?: string;
}
session: {
loggedIn: boolean;
}
}
function checkAuth(req: AppRequest, res: Response, next: NextFunction) {
if (!sqlInit.isDbInitialized()) {
res.redirect("setup");
}
@ -24,7 +35,7 @@ function checkAuth(req, res, next) {
// for electron things which need network stuff
// currently, we're doing that for file upload because handling form data seems to be difficult
function checkApiAuthOrElectron(req, res, next) {
function checkApiAuthOrElectron(req: AppRequest, res: Response, next: NextFunction) {
if (!req.session.loggedIn && !utils.isElectron() && !noAuthentication) {
reject(req, res, "Logged in session not found");
}
@ -33,7 +44,7 @@ function checkApiAuthOrElectron(req, res, next) {
}
}
function checkApiAuth(req, res, next) {
function checkApiAuth(req: AppRequest, res: Response, next: NextFunction) {
if (!req.session.loggedIn && !noAuthentication) {
reject(req, res, "Logged in session not found");
}
@ -42,7 +53,7 @@ function checkApiAuth(req, res, next) {
}
}
function checkAppInitialized(req, res, next) {
function checkAppInitialized(req: AppRequest, res: Response, next: NextFunction) {
if (!sqlInit.isDbInitialized()) {
res.redirect("setup");
}
@ -51,7 +62,7 @@ function checkAppInitialized(req, res, next) {
}
}
function checkPasswordSet(req, res, next) {
function checkPasswordSet(req: AppRequest, res: Response, next: NextFunction) {
if (!utils.isElectron() && !passwordService.isPasswordSet()) {
res.redirect("set-password");
} else {
@ -59,7 +70,7 @@ function checkPasswordSet(req, res, next) {
}
}
function checkPasswordNotSet(req, res, next) {
function checkPasswordNotSet(req: AppRequest, res: Response, next: NextFunction) {
if (!utils.isElectron() && passwordService.isPasswordSet()) {
res.redirect("login");
} else {
@ -67,7 +78,7 @@ function checkPasswordNotSet(req, res, next) {
}
}
function checkAppNotInitialized(req, res, next) {
function checkAppNotInitialized(req: AppRequest, res: Response, next: NextFunction) {
if (sqlInit.isDbInitialized()) {
reject(req, res, "App already initialized.");
}
@ -76,7 +87,7 @@ function checkAppNotInitialized(req, res, next) {
}
}
function checkEtapiToken(req, res, next) {
function checkEtapiToken(req: AppRequest, res: Response, next: NextFunction) {
if (etapiTokenService.isValidAuthHeader(req.headers.authorization)) {
next();
}
@ -85,7 +96,7 @@ function checkEtapiToken(req, res, next) {
}
}
function reject(req, res, message) {
function reject(req: AppRequest, res: Response, message: string) {
log.info(`${req.method} ${req.path} rejected with 401 ${message}`);
res.setHeader("Content-Type", "text/plain")
@ -93,7 +104,7 @@ function reject(req, res, message) {
.send(message);
}
function checkCredentials(req, res, next) {
function checkCredentials(req: AppRequest, res: Response, next: NextFunction) {
if (!sqlInit.isDbInitialized()) {
res.setHeader("Content-Type", "text/plain")
.status(400)
@ -109,7 +120,7 @@ function checkCredentials(req, res, next) {
}
const header = req.headers['trilium-cred'] || '';
const auth = new Buffer.from(header, 'base64').toString();
const auth = Buffer.from(header, 'base64').toString();
const colonIndex = auth.indexOf(':');
const password = colonIndex === -1 ? "" : auth.substr(colonIndex + 1);
// username is ignored
@ -124,7 +135,7 @@ function checkCredentials(req, res, next) {
}
}
module.exports = {
export = {
checkAuth,
checkApiAuth,
checkAppInitialized,

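For reference, the trilium-cred header consumed by checkCredentials above is a base64-encoded username:password pair, with the username part ignored. A minimal sketch of building and decoding it with Node's Buffer API (the credential values are placeholders):

// client side (placeholder credentials)
const header = Buffer.from('ignored-user:placeholder-password').toString('base64');

// server side, mirroring checkCredentials above
const auth = Buffer.from(header, 'base64').toString();
const colonIndex = auth.indexOf(':');
const password = colonIndex === -1 ? "" : auth.substr(colonIndex + 1); // 'placeholder-password'
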
View File

@ -1,5 +1,5 @@
const log = require('./log');
const noteService = require('./notes.js');
const noteService = require('./notes');
const sql = require('./sql');
const utils = require('./utils');
const attributeService = require('./attributes.js');
@ -10,7 +10,7 @@ const axios = require('axios');
const dayjs = require('dayjs');
const xml2js = require('xml2js');
const cloningService = require('./cloning.js');
const appInfo = require('./app_info.js');
const appInfo = require('./app_info');
const searchService = require('./search/services/search.js');
const SearchContext = require('./search/search_context.js');
const becca = require('../becca/becca');
@ -20,7 +20,7 @@ const specialNotesService = require('./special_notes.js');
const branchService = require('./branches.js');
const exportService = require('./export/zip.js');
const syncMutex = require('./sync_mutex');
const backupService = require('./backup.js');
const backupService = require('./backup');
const optionsService = require('./options');

View File

@ -1,14 +1,16 @@
"use strict";
const dateUtils = require('./date_utils');
const optionService = require('./options');
const fs = require('fs-extra');
const dataDir = require('./data_dir');
const log = require('./log');
const syncMutexService = require('./sync_mutex');
const cls = require('./cls');
const sql = require('./sql');
const path = require('path');
import dateUtils = require('./date_utils');
import optionService = require('./options');
import fs = require('fs-extra');
import dataDir = require('./data_dir');
import log = require('./log');
import syncMutexService = require('./sync_mutex');
import cls = require('./cls');
import sql = require('./sql');
import path = require('path');
type BackupType = ("daily" | "weekly" | "monthly");
function getExistingBackups() {
if (!fs.existsSync(dataDir.BACKUP_DIR)) {
@ -35,13 +37,13 @@ function regularBackup() {
});
}
function isBackupEnabled(backupType) {
function isBackupEnabled(backupType: BackupType) {
const optionName = `${backupType}BackupEnabled`;
return optionService.getOptionBool(optionName);
}
function periodBackup(optionName, backupType, periodInSeconds) {
function periodBackup(optionName: string, backupType: BackupType, periodInSeconds: number) {
if (!isBackupEnabled(backupType)) {
return;
}
@ -56,7 +58,7 @@ function periodBackup(optionName, backupType, periodInSeconds) {
}
}
async function backupNow(name) {
async function backupNow(name: string) {
// we don't want to back up DB in the middle of sync with potentially inconsistent DB state
return await syncMutexService.doExclusively(async () => {
const backupFile = `${dataDir.BACKUP_DIR}/backup-${name}.db`;
@ -73,7 +75,7 @@ if (!fs.existsSync(dataDir.BACKUP_DIR)) {
fs.mkdirSync(dataDir.BACKUP_DIR, 0o700);
}
module.exports = {
export = {
getExistingBackups,
backupNow,
regularBackup

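A minimal usage sketch of backupNow as typed above; the name is illustrative, and per the code the copy is written to `${dataDir.BACKUP_DIR}/backup-<name>.db` under the sync mutex so it never overlaps a sync:

import backupService = require('./backup');

async function backupBeforeUpgrade() {
    // 'before-upgrade' is an illustrative name, producing backup-before-upgrade.db
    await backupService.backupNow('before-upgrade');
}
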
View File

@ -1 +0,0 @@
module.exports = { buildDate:"2024-03-28T07:11:39+01:00", buildRevision: "399458b52f250b22be22d980a78de0b3390d7521" };

1
src/services/build.ts Normal file
View File

@ -0,0 +1 @@
export = { buildDate:"2024-03-28T07:11:39+01:00", buildRevision: "399458b52f250b22be22d980a78de0b3390d7521" };

View File

@ -1,4 +1,4 @@
module.exports = [
export = [
// label names
{ type: 'label', name: 'inbox' },
{ type: 'label', name: 'disableVersioning' },

View File

@ -1,10 +1,10 @@
const log = require('./log');
const revisionService = require('./revisions.js');
const revisionService = require('./revisions');
const becca = require('../becca/becca');
const cloningService = require('./cloning.js');
const branchService = require('./branches.js');
const utils = require('./utils');
const eraseService = require("./erase.js");
const eraseService = require("./erase");
const ACTION_HANDLERS = {
addLabel: (action, note) => {

View File

@ -1,7 +1,7 @@
"use strict";
const sql = require('./sql');
const sqlInit = require('./sql_init.js');
const sqlInit = require('./sql_init');
const log = require('./log');
const ws = require('./ws');
const syncMutexService = require('./sync_mutex');
@ -9,12 +9,12 @@ const cls = require('./cls');
const entityChangesService = require('./entity_changes');
const optionsService = require('./options');
const BBranch = require('../becca/entities/bbranch');
const revisionService = require('./revisions.js');
const revisionService = require('./revisions');
const becca = require('../becca/becca');
const utils = require('../services/utils');
const eraseService = require('../services/erase.js');
const eraseService = require('../services/erase');
const {sanitizeAttributeName} = require('./sanitize_attribute_name');
const noteTypes = require('../services/note_types.js').getNoteTypeNames();
const noteTypes = require('../services/note_types').getNoteTypeNames();
class ConsistencyChecks {
/**
@ -766,7 +766,7 @@ class ConsistencyChecks {
}
if (this.reloadNeeded) {
require('../becca/becca_loader.js').reload("consistency checks need becca reload");
require('../becca/becca_loader').reload("consistency checks need becca reload");
}
return !this.unrecoveredConsistencyErrors;

View File

@ -3,7 +3,7 @@
const sql = require('./sql');
const utils = require('./utils');
const log = require('./log');
const eraseService = require('./erase.js');
const eraseService = require('./erase');
function getEntityHashes() {
// blob erasure is not synced, we should check before each sync if there's some blob to erase

View File

@ -1,6 +1,6 @@
"use strict";
const noteService = require('./notes.js');
const noteService = require('./notes');
const attributeService = require('./attributes.js');
const dateUtils = require('./date_utils');
const sql = require('./sql');

View File

@ -65,7 +65,7 @@ function getDateTimeForFile() {
return new Date().toISOString().substr(0, 19).replace(/:/g, '');
}
function validateLocalDateTime(str: string) {
function validateLocalDateTime(str: string | null | undefined) {
if (!str) {
return;
}
@ -80,7 +80,7 @@ function validateLocalDateTime(str: string) {
}
}
function validateUtcDateTime(str: string) {
function validateUtcDateTime(str: string | undefined) {
if (!str) {
return;
}

View File

@ -78,7 +78,7 @@ function resetPassword() {
};
}
module.exports = {
export = {
isPasswordSet,
changePassword,
setPassword,

View File

@ -45,7 +45,7 @@ function putEntityChange(origEntityChange: EntityChange) {
cls.putEntityChange(ec);
}
function putNoteReorderingEntityChange(parentNoteId: string, componentId: string) {
function putNoteReorderingEntityChange(parentNoteId: string, componentId?: string) {
putEntityChange({
entityName: "note_reordering",
entityId: parentNoteId,

View File

@ -1,13 +1,14 @@
const sql = require("./sql");
const revisionService = require("./revisions.js");
const log = require("./log");
const entityChangesService = require("./entity_changes");
const optionService = require("./options");
const dateUtils = require("./date_utils");
const sqlInit = require("./sql_init.js");
const cls = require("./cls");
import sql = require("./sql");
import revisionService = require("./revisions");
import log = require("./log");
import entityChangesService = require("./entity_changes");
import optionService = require("./options");
import dateUtils = require("./date_utils");
import sqlInit = require("./sql_init");
import cls = require("./cls");
import { EntityChange } from "./entity_changes_interface";
function eraseNotes(noteIdsToErase) {
function eraseNotes(noteIdsToErase: string[]) {
if (noteIdsToErase.length === 0) {
return;
}
@ -16,17 +17,17 @@ function eraseNotes(noteIdsToErase) {
setEntityChangesAsErased(sql.getManyRows(`SELECT * FROM entity_changes WHERE entityName = 'notes' AND entityId IN (???)`, noteIdsToErase));
// we also need to erase all "dependent" entities of the erased notes
const branchIdsToErase = sql.getManyRows(`SELECT branchId FROM branches WHERE noteId IN (???)`, noteIdsToErase)
const branchIdsToErase = sql.getManyRows<{ branchId: string }>(`SELECT branchId FROM branches WHERE noteId IN (???)`, noteIdsToErase)
.map(row => row.branchId);
eraseBranches(branchIdsToErase);
const attributeIdsToErase = sql.getManyRows(`SELECT attributeId FROM attributes WHERE noteId IN (???)`, noteIdsToErase)
const attributeIdsToErase = sql.getManyRows<{ attributeId: string }>(`SELECT attributeId FROM attributes WHERE noteId IN (???)`, noteIdsToErase)
.map(row => row.attributeId);
eraseAttributes(attributeIdsToErase);
const revisionIdsToErase = sql.getManyRows(`SELECT revisionId FROM revisions WHERE noteId IN (???)`, noteIdsToErase)
const revisionIdsToErase = sql.getManyRows<{ revisionId: string }>(`SELECT revisionId FROM revisions WHERE noteId IN (???)`, noteIdsToErase)
.map(row => row.revisionId);
eraseRevisions(revisionIdsToErase);
@ -34,7 +35,7 @@ function eraseNotes(noteIdsToErase) {
log.info(`Erased notes: ${JSON.stringify(noteIdsToErase)}`);
}
function setEntityChangesAsErased(entityChanges) {
function setEntityChangesAsErased(entityChanges: EntityChange[]) {
for (const ec of entityChanges) {
ec.isErased = true;
// we're not changing hash here, not sure if good or not
@ -45,7 +46,7 @@ function setEntityChangesAsErased(entityChanges) {
}
}
function eraseBranches(branchIdsToErase) {
function eraseBranches(branchIdsToErase: string[]) {
if (branchIdsToErase.length === 0) {
return;
}
@ -57,7 +58,7 @@ function eraseBranches(branchIdsToErase) {
log.info(`Erased branches: ${JSON.stringify(branchIdsToErase)}`);
}
function eraseAttributes(attributeIdsToErase) {
function eraseAttributes(attributeIdsToErase: string[]) {
if (attributeIdsToErase.length === 0) {
return;
}
@ -69,7 +70,7 @@ function eraseAttributes(attributeIdsToErase) {
log.info(`Erased attributes: ${JSON.stringify(attributeIdsToErase)}`);
}
function eraseAttachments(attachmentIdsToErase) {
function eraseAttachments(attachmentIdsToErase: string[]) {
if (attachmentIdsToErase.length === 0) {
return;
}
@ -81,7 +82,7 @@ function eraseAttachments(attachmentIdsToErase) {
log.info(`Erased attachments: ${JSON.stringify(attachmentIdsToErase)}`);
}
function eraseRevisions(revisionIdsToErase) {
function eraseRevisions(revisionIdsToErase: string[]) {
if (revisionIdsToErase.length === 0) {
return;
}
@ -116,7 +117,7 @@ function eraseUnusedBlobs() {
log.info(`Erased unused blobs: ${JSON.stringify(unusedBlobIds)}`);
}
function eraseDeletedEntities(eraseEntitiesAfterTimeInSeconds = null) {
function eraseDeletedEntities(eraseEntitiesAfterTimeInSeconds: number | null = null) {
// this is important also so that the erased entity changes are sent to the connected clients
sql.transactional(() => {
if (eraseEntitiesAfterTimeInSeconds === null) {
@ -125,41 +126,33 @@ function eraseDeletedEntities(eraseEntitiesAfterTimeInSeconds = null) {
const cutoffDate = new Date(Date.now() - eraseEntitiesAfterTimeInSeconds * 1000);
const noteIdsToErase = sql.getColumn("SELECT noteId FROM notes WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]);
const noteIdsToErase = sql.getColumn<string>("SELECT noteId FROM notes WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]);
eraseNotes(noteIdsToErase);
const branchIdsToErase = sql.getColumn("SELECT branchId FROM branches WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]);
const branchIdsToErase = sql.getColumn<string>("SELECT branchId FROM branches WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]);
eraseBranches(branchIdsToErase);
const attributeIdsToErase = sql.getColumn("SELECT attributeId FROM attributes WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]);
const attributeIdsToErase = sql.getColumn<string>("SELECT attributeId FROM attributes WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]);
eraseAttributes(attributeIdsToErase);
const attachmentIdsToErase = sql.getColumn("SELECT attachmentId FROM attachments WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]);
const attachmentIdsToErase = sql.getColumn<string>("SELECT attachmentId FROM attachments WHERE isDeleted = 1 AND utcDateModified <= ?", [dateUtils.utcDateTimeStr(cutoffDate)]);
eraseAttachments(attachmentIdsToErase);
eraseUnusedBlobs();
});
}
function eraseNotesWithDeleteId(deleteId) {
const noteIdsToErase = sql.getColumn("SELECT noteId FROM notes WHERE isDeleted = 1 AND deleteId = ?", [deleteId]);
function eraseNotesWithDeleteId(deleteId: string) {
const noteIdsToErase = sql.getColumn<string>("SELECT noteId FROM notes WHERE isDeleted = 1 AND deleteId = ?", [deleteId]);
eraseNotes(noteIdsToErase);
const branchIdsToErase = sql.getColumn("SELECT branchId FROM branches WHERE isDeleted = 1 AND deleteId = ?", [deleteId]);
const branchIdsToErase = sql.getColumn<string>("SELECT branchId FROM branches WHERE isDeleted = 1 AND deleteId = ?", [deleteId]);
eraseBranches(branchIdsToErase);
const attributeIdsToErase = sql.getColumn("SELECT attributeId FROM attributes WHERE isDeleted = 1 AND deleteId = ?", [deleteId]);
const attributeIdsToErase = sql.getColumn<string>("SELECT attributeId FROM attributes WHERE isDeleted = 1 AND deleteId = ?", [deleteId]);
eraseAttributes(attributeIdsToErase);
const attachmentIdsToErase = sql.getColumn("SELECT attachmentId FROM attachments WHERE isDeleted = 1 AND deleteId = ?", [deleteId]);
const attachmentIdsToErase = sql.getColumn<string>("SELECT attachmentId FROM attachments WHERE isDeleted = 1 AND deleteId = ?", [deleteId]);
eraseAttachments(attachmentIdsToErase);
eraseUnusedBlobs();
@ -173,13 +166,13 @@ function eraseUnusedAttachmentsNow() {
eraseScheduledAttachments(0);
}
function eraseScheduledAttachments(eraseUnusedAttachmentsAfterSeconds = null) {
function eraseScheduledAttachments(eraseUnusedAttachmentsAfterSeconds: number | null = null) {
if (eraseUnusedAttachmentsAfterSeconds === null) {
eraseUnusedAttachmentsAfterSeconds = optionService.getOptionInt('eraseUnusedAttachmentsAfterSeconds');
}
const cutOffDate = dateUtils.utcDateTimeStr(new Date(Date.now() - (eraseUnusedAttachmentsAfterSeconds * 1000)));
const attachmentIdsToErase = sql.getColumn('SELECT attachmentId FROM attachments WHERE utcDateScheduledForErasureSince < ?', [cutOffDate]);
const attachmentIdsToErase = sql.getColumn<string>('SELECT attachmentId FROM attachments WHERE utcDateScheduledForErasureSince < ?', [cutOffDate]);
eraseAttachments(attachmentIdsToErase);
}
@ -193,7 +186,7 @@ sqlInit.dbReady.then(() => {
setInterval(cls.wrap(() => eraseScheduledAttachments()), 3600 * 1000);
});
module.exports = {
export = {
eraseDeletedNotesNow,
eraseUnusedAttachmentsNow,
eraseNotesWithDeleteId,

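The generic parameters added to sql.getColumn and sql.getManyRows above are what give these helpers typed results; a minimal sketch of the pattern, reusing the queries from this file:

import sql = require("./sql");

// getColumn<string> yields string[] instead of any[]
const noteIds = sql.getColumn<string>("SELECT noteId FROM notes WHERE isDeleted = 1");

// getManyRows<{ branchId: string }> types each row, so row.branchId is compiler-checked
const branchIds = sql.getManyRows<{ branchId: string }>(
    `SELECT branchId FROM branches WHERE noteId IN (???)`, noteIds
).map(row => row.branchId);
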
View File

@ -1,17 +1,17 @@
const becca = require('../becca/becca');
const utils = require('./utils');
const BEtapiToken = require('../becca/entities/betapi_token');
const crypto = require("crypto");
import becca = require('../becca/becca');
import utils = require('./utils');
import BEtapiToken = require('../becca/entities/betapi_token');
import crypto = require("crypto");
function getTokens() {
return becca.getEtapiTokens();
}
function getTokenHash(token) {
function getTokenHash(token: crypto.BinaryLike) {
return crypto.createHash('sha256').update(token).digest('base64');
}
function createToken(tokenName) {
function createToken(tokenName: string) {
const token = utils.randomSecureToken(32);
const tokenHash = getTokenHash(token);
@ -25,7 +25,7 @@ function createToken(tokenName) {
};
}
function parseAuthToken(auth) {
function parseAuthToken(auth: string | undefined) {
if (!auth) {
return null;
}
@ -64,7 +64,7 @@ function parseAuthToken(auth) {
}
}
function isValidAuthHeader(auth) {
function isValidAuthHeader(auth: string | undefined) {
const parsed = parseAuthToken(auth);
if (!parsed) {
@ -93,7 +93,7 @@ function isValidAuthHeader(auth) {
}
}
function renameToken(etapiTokenId, newName) {
function renameToken(etapiTokenId: string, newName: string) {
const etapiToken = becca.getEtapiToken(etapiTokenId);
if (!etapiToken) {
@ -104,7 +104,7 @@ function renameToken(etapiTokenId, newName) {
etapiToken.save();
}
function deleteToken(etapiTokenId) {
function deleteToken(etapiTokenId: string) {
const etapiToken = becca.getEtapiToken(etapiTokenId);
if (!etapiToken) {
@ -114,7 +114,7 @@ function deleteToken(etapiTokenId) {
etapiToken.markAsDeletedSimple();
}
module.exports = {
export = {
getTokens,
createToken,
renameToken,

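getTokenHash above means only a SHA-256 digest of each ETAPI token is kept for later comparison; a minimal sketch of that hashing step (the token value is a placeholder standing in for utils.randomSecureToken(32)):

import crypto = require("crypto");

const token = "placeholder-token-value";
const tokenHash = crypto.createHash('sha256').update(token).digest('base64');
// the same digest can later be recomputed from an incoming token and compared
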
View File

@ -16,9 +16,9 @@ const archiver = require('archiver');
const log = require('../log');
const TaskContext = require('../task_context');
const ValidationError = require('../../errors/validation_error');
const NoteMeta = require('../meta/note_meta.js');
const AttachmentMeta = require('../meta/attachment_meta.js');
const AttributeMeta = require('../meta/attribute_meta.js');
const NoteMeta = require('../meta/note_meta');
const AttachmentMeta = require('../meta/attachment_meta');
const AttributeMeta = require('../meta/attribute_meta');
/**
* @param {TaskContext} taskContext

View File

@ -1,10 +1,10 @@
const eventService = require('./events');
const scriptService = require('./script.js');
const treeService = require('./tree.js');
const noteService = require('./notes.js');
const noteService = require('./notes');
const becca = require('../becca/becca');
const BAttribute = require('../becca/entities/battribute');
const hiddenSubtreeService = require('./hidden_subtree.js');
const hiddenSubtreeService = require('./hidden_subtree');
const oneTimeTimer = require('./one_time_timer.js');
function runAttachedRelations(note, relationName, originEntity) {

View File

@ -1,8 +1,10 @@
const becca = require('../becca/becca');
const noteService = require('./notes.js');
const BAttribute = require('../becca/entities/battribute');
const log = require('./log');
const migrationService = require('./migration.js');
import BAttribute = require("../becca/entities/battribute");
import { AttributeType, NoteType } from "../becca/entities/rows";
import becca = require('../becca/becca');
import noteService = require('./notes');
import log = require('./log');
import migrationService = require('./migration');
const LBTPL_ROOT = "_lbTplRoot";
const LBTPL_BASE = "_lbTplBase";
@ -13,13 +15,36 @@ const LBTPL_BUILTIN_WIDGET = "_lbTplBuiltinWidget";
const LBTPL_SPACER = "_lbTplSpacer";
const LBTPL_CUSTOM_WIDGET = "_lbTplCustomWidget";
interface Attribute {
type: AttributeType;
name: string;
isInheritable?: boolean;
value?: string
}
interface Item {
notePosition?: number;
id: string;
title: string;
type: NoteType;
icon?: string;
attributes?: Attribute[];
children?: Item[];
isExpanded?: boolean;
baseSize?: string;
growthFactor?: string;
targetNoteId?: "_backendLog" | "_globalNoteMap";
builtinWidget?: "bookmarks" | "spacer" | "backInHistoryButton" | "forwardInHistoryButton" | "syncStatus" | "protectedSession" | "todayInJournal" | "calendar";
command?: "jumpToNote" | "searchNotes" | "createNoteIntoInbox" | "showRecentChanges";
}
/*
* Hidden subtree is generated as a "predictable structure" which means that it avoids generating random IDs to always
* produce the same structure. This is needed because it is run on multiple instances in the sync cluster which might produce
* duplicate subtrees. This way, all instances will generate the same structure with the same IDs.
*/
const HIDDEN_SUBTREE_DEFINITION = {
const HIDDEN_SUBTREE_DEFINITION: Item = {
id: '_hidden',
title: 'Hidden Notes',
type: 'doc',
@ -244,7 +269,7 @@ function checkHiddenSubtree(force = false) {
checkHiddenSubtreeRecursively('root', HIDDEN_SUBTREE_DEFINITION);
}
function checkHiddenSubtreeRecursively(parentNoteId, item) {
function checkHiddenSubtreeRecursively(parentNoteId: string, item: Item) {
if (!item.id || !item.type || !item.title) {
throw new Error(`Item does not contain mandatory properties: ${JSON.stringify(item)}`);
}
@ -337,7 +362,7 @@ function checkHiddenSubtreeRecursively(parentNoteId, item) {
}
}
module.exports = {
export = {
checkHiddenSubtree,
LBTPL_ROOT,
LBTPL_BASE,

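A minimal sketch of an entry conforming to the new Item interface (the id, title and attribute below are illustrative, not part of the real definition); the fixed id is what keeps the generated subtree identical across instances in a sync cluster:

// within this module, where the Item interface is in scope
const exampleItem: Item = {
    id: '_exampleFixedId',   // hard-coded, never random
    title: 'Example',
    type: 'doc',
    attributes: [
        { type: 'label', name: 'exampleLabel', value: 'true' }
    ],
    children: []
};
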
View File

@ -1,18 +1,18 @@
const sanitizeHtml = require('sanitize-html');
const sanitizeUrl = require('@braintree/sanitize-url').sanitizeUrl;
import sanitizeHtml = require('sanitize-html');
import sanitizeUrl = require('@braintree/sanitize-url');
// intended mainly as protection against XSS via import
// secondarily, it (partly) protects against "CSS takeover"
// sanitize also note titles, label values etc. - there are so many usages which make it difficult
// to guarantee all of them are properly handled
function sanitize(dirtyHtml) {
function sanitize(dirtyHtml: string) {
if (!dirtyHtml) {
return dirtyHtml;
}
// avoid H1 per https://github.com/zadam/trilium/issues/1552
// demote H1, and if that conflicts with existing H2, demote that, etc
const transformTags = {};
const transformTags: Record<string, string> = {};
const lowercasedHtml = dirtyHtml.toLowerCase();
for (let i = 1; i < 6; ++i) {
if (lowercasedHtml.includes(`<h${i}`)) {
@ -47,9 +47,9 @@ function sanitize(dirtyHtml) {
});
}
module.exports = {
export = {
sanitize,
sanitizeUrl: url => {
return sanitizeUrl(url).trim();
sanitizeUrl: (url: string) => {
return sanitizeUrl.sanitizeUrl(url).trim();
}
};
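
For context on the two libraries used above: sanitize-html applies the transformTags map built in sanitize() (demoting h1 as the loop does), and @braintree/sanitize-url neutralizes javascript: URLs. A rough sketch with an explicit, illustrative allow-list:

import sanitizeHtml = require('sanitize-html');
import sanitizeUrl = require('@braintree/sanitize-url');

const clean = sanitizeHtml('<h1>Title</h1><script>alert(1)</script>', {
    allowedTags: ['h2', 'p'],      // illustrative allow-list
    transformTags: { h1: 'h2' }    // demote h1, as the loop above does
});
// => '<h2>Title</h2>' — the script tag is not in the allow-list and is dropped

const safeUrl = sanitizeUrl.sanitizeUrl('javascript:alert(1)');
// => 'about:blank'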

View File

@ -3,7 +3,7 @@
const becca = require('../becca/becca');
const log = require('./log');
const protectedSessionService = require('./protected_session');
const noteService = require('./notes.js');
const noteService = require('./notes');
const optionService = require('./options');
const sql = require('./sql');
const jimp = require('jimp');
@ -11,7 +11,7 @@ const imageType = require('image-type');
const sanitizeFilename = require('sanitize-filename');
const isSvg = require('is-svg');
const isAnimated = require('is-animated');
const htmlSanitizer = require('./html_sanitizer.js');
const htmlSanitizer = require('./html_sanitizer');
async function processImage(uploadBuffer, originalName, shrinkImageSwitch) {
const compressImages = optionService.getOptionBool("compressImages");
@ -154,7 +154,7 @@ function saveImageToAttachment(noteId, uploadBuffer, originalName, shrinkImageSw
setTimeout(() => {
sql.transactional(() => {
const note = becca.getNoteOrThrow(noteId);
const noteService = require('../services/notes.js');
const noteService = require('../services/notes');
noteService.asyncPostProcessContent(note, note.getContent()); // to mark an unused attachment for deletion
});
}, 5000);

View File

@ -4,10 +4,10 @@ const {Throttle} = require('stream-throttle');
const log = require('../log');
const utils = require('../utils');
const sql = require('../sql');
const noteService = require('../notes.js');
const noteService = require('../notes');
const imageService = require('../image.js');
const protectedSessionService = require('../protected_session');
const htmlSanitizer = require('../html_sanitizer.js');
const htmlSanitizer = require('../html_sanitizer');
const {sanitizeAttributeName} = require('../sanitize_attribute_name');
/**

View File

@ -1,7 +1,7 @@
"use strict";
const marked = require("marked");
const htmlSanitizer = require('../html_sanitizer.js');
const htmlSanitizer = require('../html_sanitizer');
const importUtils = require('./utils');
function renderToHtml(content, title) {

View File

@ -1,9 +1,9 @@
"use strict";
const noteService = require('../../services/notes.js');
const noteService = require('../../services/notes');
const parseString = require('xml2js').parseString;
const protectedSessionService = require('../protected_session');
const htmlSanitizer = require('../html_sanitizer.js');
const htmlSanitizer = require('../html_sanitizer');
/**
* @param {TaskContext} taskContext

View File

@ -1,13 +1,13 @@
"use strict";
const noteService = require('../../services/notes.js');
const noteService = require('../../services/notes');
const imageService = require('../../services/image.js');
const protectedSessionService = require('../protected_session');
const markdownService = require('./markdown.js');
const mimeService = require('./mime.js');
const utils = require('../../services/utils');
const importUtils = require('./utils');
const htmlSanitizer = require('../html_sanitizer.js');
const htmlSanitizer = require('../html_sanitizer');
function importSingleFile(taskContext, file, parentNote) {
const mime = mimeService.getMime(file.originalname) || file.mimetype;

View File

@ -3,7 +3,7 @@
const BAttribute = require('../../becca/entities/battribute');
const utils = require('../../services/utils');
const log = require('../../services/log');
const noteService = require('../../services/notes.js');
const noteService = require('../../services/notes');
const attributeService = require('../../services/attributes.js');
const BBranch = require('../../becca/entities/bbranch');
const path = require('path');
@ -11,7 +11,7 @@ const protectedSessionService = require('../protected_session');
const mimeService = require('./mime.js');
const treeService = require('../tree.js');
const yauzl = require("yauzl");
const htmlSanitizer = require('../html_sanitizer.js');
const htmlSanitizer = require('../html_sanitizer');
const becca = require('../../becca/becca');
const BAttachment = require('../../becca/entities/battachment');
const markdownService = require('./markdown.js');

View File

@ -1,12 +1,21 @@
"use strict";
const optionService = require('./options');
const log = require('./log');
const utils = require('./utils');
import optionService = require('./options');
import log = require('./log');
import utils = require('./utils');
const isMac = process.platform === "darwin";
const isElectron = utils.isElectron();
interface KeyboardShortcut {
separator?: string;
actionName?: string;
description?: string;
defaultShortcuts?: string[];
effectiveShortcuts?: string[];
scope?: string;
}
/**
* Scope here means on which element the keyboard shortcuts are attached - this means that for the shortcut to work,
* the focus has to be inside the element.
@ -16,7 +25,7 @@ const isElectron = utils.isElectron();
* e.g. CTRL-C in note tree does something a bit different from CTRL-C in the text editor.
*/
const DEFAULT_KEYBOARD_ACTIONS = [
const DEFAULT_KEYBOARD_ACTIONS: KeyboardShortcut[] = [
{
separator: "Note navigation"
},
@ -606,15 +615,15 @@ for (const action of DEFAULT_KEYBOARD_ACTIONS) {
}
function getKeyboardActions() {
const actions = JSON.parse(JSON.stringify(DEFAULT_KEYBOARD_ACTIONS));
const actions: KeyboardShortcut[] = JSON.parse(JSON.stringify(DEFAULT_KEYBOARD_ACTIONS));
for (const action of actions) {
action.effectiveShortcuts = action.effectiveShortcuts ? action.defaultShortcuts.slice() : [];
action.effectiveShortcuts = action.defaultShortcuts ? action.defaultShortcuts.slice() : [];
}
for (const option of optionService.getOptions()) {
if (option.name.startsWith('keyboardShortcuts')) {
let actionName = option.name.substr(17);
let actionName = option.name.substring(17);
actionName = actionName.charAt(0).toLowerCase() + actionName.slice(1);
const action = actions.find(ea => ea.actionName === actionName);
@ -636,7 +645,7 @@ function getKeyboardActions() {
return actions;
}
module.exports = {
export = {
DEFAULT_KEYBOARD_ACTIONS,
getKeyboardActions
};
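
The option handling above strips the 17-character 'keyboardShortcuts' prefix and lower-cases the first letter to recover the action name; a small illustration with a hypothetical option name:

const optionName = 'keyboardShortcutsJumpToNote';                        // hypothetical option
let actionName = optionName.substring(17);                               // 'JumpToNote'
actionName = actionName.charAt(0).toLowerCase() + actionName.slice(1);   // 'jumpToNote'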

View File

@ -1,16 +0,0 @@
class AttachmentMeta {
/** @type {string} */
attachmentId;
/** @type {string} */
title;
/** @type {string} */
role;
/** @type {string} */
mime;
/** @type {int} */
position;
/** @type {string} */
dataFileName;
}
module.exports = AttachmentMeta;

View File

@ -0,0 +1,10 @@
interface AttachmentMeta {
attachmentId: string;
title: string;
role: string;
mime: string;
position: number;
dataFileName: string;
}
export = AttachmentMeta;

View File

@ -1,14 +0,0 @@
class AttributeMeta {
/** @type {string} */
type;
/** @type {string} */
name;
/** @type {string} */
value;
/** @type {boolean} */
isInheritable;
/** @type {int} */
position;
}
module.exports = AttributeMeta;

View File

@ -0,0 +1,9 @@
interface AttributeMeta {
type: string;
name: string;
value: string;
isInheritable: boolean;
position: number;
}
export = AttributeMeta;

View File

@ -1,36 +0,0 @@
class NoteMeta {
/** @type {string} */
noteId;
/** @type {string} */
notePath;
/** @type {boolean} */
isClone;
/** @type {string} */
title;
/** @type {int} */
notePosition;
/** @type {string} */
prefix;
/** @type {boolean} */
isExpanded;
/** @type {string} */
type;
/** @type {string} */
mime;
/** @type {string} - 'html' or 'markdown', applicable to text notes only */
format;
/** @type {string} */
dataFileName;
/** @type {string} */
dirFileName;
/** @type {boolean} - this file should not be imported (e.g., HTML navigation) */
noImport = false;
/** @type {AttributeMeta[]} */
attributes;
/** @type {AttachmentMeta[]} */
attachments;
/** @type {NoteMeta[]|undefined} */
children;
}
module.exports = NoteMeta;

View File

@ -0,0 +1,24 @@
import AttachmentMeta = require("./attachment_meta");
import AttributeMeta = require("./attribute_meta");
interface NoteMeta {
noteId: string;
notePath: string;
isClone: boolean;
title: string;
notePosition: number;
prefix: string;
isExpanded: boolean;
type: string;
mime: string;
/** 'html' or 'markdown', applicable to text notes only */
format: "html" | "markdown";
dataFileName: string;
dirFileName: string;
/** this file should not be imported (e.g., HTML navigation) */
noImport: boolean;
attributes: AttributeMeta[];
attachments: AttachmentMeta[];
children?: NoteMeta[];
}
export = NoteMeta;

View File

@ -1,11 +1,18 @@
const backupService = require('./backup.js');
const sql = require('./sql');
const fs = require('fs-extra');
const log = require('./log');
const utils = require('./utils');
const resourceDir = require('./resource_dir');
const appInfo = require('./app_info.js');
const cls = require('./cls');
import backupService = require('./backup');
import sql = require('./sql');
import fs = require('fs-extra');
import log = require('./log');
import utils = require('./utils');
import resourceDir = require('./resource_dir');
import appInfo = require('./app_info');
import cls = require('./cls');
interface MigrationInfo {
dbVersion: number;
name: string;
file: string;
type: string;
}
async function migrate() {
const currentDbVersion = getDbVersion();
@ -25,7 +32,12 @@ async function migrate() {
: 'before-migration'
);
const migrations = fs.readdirSync(resourceDir.MIGRATIONS_DIR).map(file => {
const migrationFiles = fs.readdirSync(resourceDir.MIGRATIONS_DIR);
if (migrationFiles == null) {
return;
}
const migrations = migrationFiles.map(file => {
const match = file.match(/^([0-9]{4})__([a-zA-Z0-9_ ]+)\.(sql|js)$/);
if (!match) {
return null;
@ -45,7 +57,7 @@ async function migrate() {
} else {
return null;
}
}).filter(el => !!el);
}).filter((el): el is MigrationInfo => !!el);
migrations.sort((a, b) => a.dbVersion - b.dbVersion);
@ -67,7 +79,7 @@ async function migrate() {
WHERE name = ?`, [mig.dbVersion.toString(), "dbVersion"]);
log.info(`Migration to version ${mig.dbVersion} has been successful.`);
} catch (e) {
} catch (e: any) {
log.error(`error during migration to version ${mig.dbVersion}: ${e.stack}`);
log.error("migration failed, crashing hard"); // this is not very user-friendly :-/
@ -84,7 +96,7 @@ async function migrate() {
}
}
function executeMigration(mig) {
function executeMigration(mig: MigrationInfo) {
if (mig.type === 'sql') {
const migrationSql = fs.readFileSync(`${resourceDir.MIGRATIONS_DIR}/${mig.file}`).toString('utf8');
@ -131,7 +143,7 @@ async function migrateIfNecessary() {
}
}
module.exports = {
export = {
migrateIfNecessary,
isDbUpToDate
};
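
The filename regex above is what populates MigrationInfo; a small illustration of how one (hypothetical) migration file would be parsed:

// within this module, where MigrationInfo is in scope
const file = '0229__example_migration.sql';   // hypothetical file name
const match = file.match(/^([0-9]{4})__([a-zA-Z0-9_ ]+)\.(sql|js)$/);
if (match) {
    const mig: MigrationInfo = {
        dbVersion: parseInt(match[1]),        // 229
        name: match[2],                       // 'example_migration'
        file: file,
        type: match[3]                        // 'sql'
    };
}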

View File

@ -16,7 +16,7 @@ const noteTypes = [
{ type: 'contentWidget', defaultMime: '' }
];
function getDefaultMimeForNoteType(typeName) {
function getDefaultMimeForNoteType(typeName: string) {
const typeRec = noteTypes.find(nt => nt.type === typeName);
if (!typeRec) {
@ -26,7 +26,7 @@ function getDefaultMimeForNoteType(typeName) {
return typeRec.defaultMime;
}
module.exports = {
export = {
getNoteTypeNames: () => noteTypes.map(nt => nt.type),
getDefaultMimeForNoteType
};

Some files were not shown because too many files have changed in this diff.