Merge pull request #325 from JYC333/action

Fix GitHub Actions
Elian Doran 2024-08-10 23:00:15 +03:00 committed by GitHub
commit 395cf59e59
20 changed files with 1255 additions and 522 deletions


@@ -23,13 +23,17 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
+        arch: [x64, arm64]
         os:
           - name: macos
             image: macos-latest
+            extension: dmg
           - name: linux
             image: ubuntu-latest
+            extension: deb
           - name: windows
             image: windows-latest
+            extension: exe
     runs-on: ${{ matrix.os.image }}
     steps:
       - uses: actions/checkout@v4
@@ -45,9 +49,59 @@ jobs:
       - name: Update build info
         run: npm run update-build-info
       - name: Run electron-forge
-        run: npm run make-electron
+        run: npm run make-electron -- --arch=${{ matrix.arch }}
+      - name: Prepare artifacts (Unix)
+        if: runner.os != 'windows'
+        run: |
+          mkdir -p upload
+          file=$(find out/make -name '*.zip' -print -quit)
+          cp "$file" "upload/TriliumNextNotes-${{ matrix.os.name }}-${{ matrix.arch }}-${{ github.ref_name }}.zip"
+          file=$(find out/make -name '*.${{ matrix.os.extension }}' -print -quit)
+          cp "$file" "upload/TriliumNextNotes-${{ matrix.os.name }}-${{ matrix.arch }}-${{ github.ref_name }}.${{ matrix.os.extension }}"
+      - name: Prepare artifacts (Windows)
+        if: runner.os == 'windows'
+        run: |
+          mkdir upload
+          $file = Get-ChildItem -Path out/make -Filter '*.zip' -Recurse | Select-Object -First 1
+          Copy-Item -Path $file.FullName -Destination "upload/TriliumNextNotes-${{ matrix.os.name }}-${{ matrix.arch }}-${{ github.ref_name }}.zip"
+          $file = Get-ChildItem -Path out/make -Filter '*.${{ matrix.os.extension }}' -Recurse | Select-Object -First 1
+          Copy-Item -Path $file.FullName -Destination "upload/TriliumNextNotes-${{ matrix.os.name }}-${{ matrix.arch }}-${{ github.ref_name }}.${{ matrix.os.extension }}"
       - name: Publish artifacts
         uses: actions/upload-artifact@v4
         with:
-          name: ${{ matrix.os.name }}
-          path: out/make/**
+          name: TriliumNextNotes ${{ matrix.os.name }} ${{ matrix.arch }}
+          path: upload/*.zip
+          overwrite: true
+      - name: Publish installer artifacts
+        uses: actions/upload-artifact@v4
+        with:
+          name: TriliumNextNotes ${{ matrix.os.name }} ${{ matrix.arch }}
+          path: upload/*.${{ matrix.os.extension }}
+          overwrite: true
+  build_linux_server-x64:
+    name: Build Linux Server x86_64
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up node & dependencies
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20
+          cache: "npm"
+      - name: Install dependencies
+        run: npm ci
+      - name: Run Linux server build (x86_64)
+        run: |
+          npm run update-build-info
+          ./bin/build-server.sh
+      - name: Prepare artifacts
+        if: runner.os != 'windows'
+        run: |
+          mkdir -p upload
+          file=$(find dist -name '*.tar.xz' -print -quit)
+          cp "$file" "upload/TriliumNextNotes-linux-x64-${{ github.ref_name }}.tar.xz"
+      - uses: actions/upload-artifact@v4
+        with:
+          name: TriliumNextNotes linux server x64
+          path: upload/TriliumNextNotes-linux-x64-${{ github.ref_name }}.tar.xz
+          overwrite: true
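
A quick TypeScript sketch (illustrative only, not part of the workflow) of how the arch x os matrix above expands into the artifact file names produced by the "Prepare artifacts" steps; the refName value is a hypothetical tag standing in for ${{ github.ref_name }}:

const archs = ["x64", "arm64"] as const;
const oses = [
    { name: "macos", extension: "dmg" },
    { name: "linux", extension: "deb" },
    { name: "windows", extension: "exe" },
] as const;
const refName = "v0.90.3"; // hypothetical ${{ github.ref_name }}

for (const arch of archs) {
    for (const os of oses) {
        // each of the six matrix jobs uploads a portable zip plus a platform installer
        console.log(`TriliumNextNotes-${os.name}-${arch}-${refName}.zip`);
        console.log(`TriliumNextNotes-${os.name}-${arch}-${refName}.${os.extension}`);
    }
}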


@@ -1,7 +1,7 @@
 #!/usr/bin/env node
-const anonymizationService = require('../src/services/anonymization');
-const fs = require('fs');
-const path = require('path');
+import anonymizationService from '../src/services/anonymization.js';
+import fs from 'fs';
+import path from 'path';
 fs.writeFileSync(path.resolve(__dirname, 'tpl', 'anonymize-database.sql'), anonymizationService.getFullAnonymizationScript());


@@ -14,10 +14,10 @@ npm install
 ## Running
-See output of `node dump-db.js --help`:
+See output of `npx esrun dump.ts --help`:
 ```
-dump-db.js <path_to_document> <target_directory>
+dump-db.ts <path_to_document> <target_directory>
 dump the contents of document.db into the target directory


@@ -1,14 +1,14 @@
 #!/usr/bin/env node
-const yargs = require('yargs/yargs')
-const { hideBin } = require('yargs/helpers')
-const dumpService = require('./inc/dump.js');
+import yargs from 'yargs';
+import { hideBin } from 'yargs/helpers';
+import dumpService from './inc/dump.js';
 yargs(hideBin(process.argv))
     .command('$0 <path_to_document> <target_directory>', 'dump the contents of document.db into the target directory', (yargs) => {
         return yargs
-            .positional('path_to_document', { describe: 'path to the document.db' })
-            .positional('target_directory', { describe: 'path of the directory into which the notes should be dumped' })
+            .option('path_to_document', { alias: 'p', describe: 'path to the document.db', type: 'string', demandOption: true })
+            .option('target_directory', { alias: 't', describe: 'path of the directory into which the notes should be dumped', type: 'string', demandOption: true });
     }, (argv) => {
         try {
             dumpService.dumpDocument(argv.path_to_document, argv.target_directory, {


@@ -1,8 +1,8 @@
-const crypto = require("crypto");
-const sql = require('./sql');
-const decryptService = require('./decrypt.js');
+import crypto from 'crypto';
+import sql from './sql.js';
+import decryptService from './decrypt.js';
-function getDataKey(password) {
+function getDataKey(password: any) {
     if (!password) {
         return null;
     }
@@ -16,28 +16,28 @@ function getDataKey(password) {
         return decryptedDataKey;
     }
-    catch (e) {
+    catch (e: any) {
         throw new Error(`Cannot read data key, the entered password might be wrong. The underlying error: '${e.message}', stack:\n${e.stack}`);
     }
 }
-function getPasswordDerivedKey(password) {
+function getPasswordDerivedKey(password: any) {
     const salt = getOption('passwordDerivedKeySalt');
     return getScryptHash(password, salt);
 }
-function getScryptHash(password, salt) {
+function getScryptHash(password: any, salt: any) {
     const hashed = crypto.scryptSync(password, salt, 32,
-        {N: 16384, r:8, p:1});
+        { N: 16384, r: 8, p: 1 });
     return hashed;
 }
-function getOption(name) {
+function getOption(name: string) {
     return sql.getValue("SELECT value FROM options WHERE name = ?", [name]);
 }
-module.exports = {
+export default {
     getDataKey
 };


@@ -1,6 +1,6 @@
-const crypto = require("crypto");
+import crypto from 'crypto';
-function decryptString(dataKey, cipherText) {
+function decryptString(dataKey: any, cipherText: any) {
     const buffer = decrypt(dataKey, cipherText);
     if (buffer === null) {
@@ -16,7 +16,7 @@ function decryptString(dataKey, cipherText) {
     return str;
 }
-function decrypt(key, cipherText, ivLength = 13) {
+function decrypt(key: any, cipherText: any, ivLength = 13) {
     if (cipherText === null) {
         return null;
     }
@@ -46,11 +46,10 @@ function decrypt(key, cipherText, ivLength = 13) {
         return payload;
     }
-    catch (e) {
+    catch (e: any) {
         // recovery from https://github.com/zadam/trilium/issues/510
         if (e.message?.includes("WRONG_FINAL_BLOCK_LENGTH") || e.message?.includes("wrong final block length")) {
-            log.info("Caught WRONG_FINAL_BLOCK_LENGTH, returning cipherText instead");
+            console.log("Caught WRONG_FINAL_BLOCK_LENGTH, returning cipherText instead");
             return cipherText;
         }
         else {
@@ -59,7 +58,7 @@
         }
     }
 }
-function pad(data) {
+function pad(data: any) {
     if (data.length > 16) {
         data = data.slice(0, 16);
     }
@@ -72,7 +71,7 @@ function pad(data) {
     return Buffer.from(data);
 }
-function arraysIdentical(a, b) {
+function arraysIdentical(a: any, b: any) {
     let i = a.length;
     if (i !== b.length) return false;
     while (i--) {
@@ -81,12 +80,12 @@ function arraysIdentical(a, b) {
     return true;
 }
-function shaArray(content) {
+function shaArray(content: any) {
     // we use this as simple checksum and don't rely on its security so SHA-1 is good enough
     return crypto.createHash('sha1').update(content).digest();
 }
-module.exports = {
+export default {
     decrypt,
     decryptString
 };


@@ -1,11 +1,11 @@
-const fs = require("fs");
-const sanitize = require("sanitize-filename");
-const sql = require('./sql.js');
-const decryptService = require('./decrypt.js');
-const dataKeyService = require('./data_key.js');
-const extensionService = require('./extension.js');
+import fs from 'fs';
+import sanitize from 'sanitize-filename';
+import sql from './sql.js';
+import decryptService from './decrypt.js';
+import dataKeyService from './data_key.js';
+import extensionService from './extension.js';
-function dumpDocument(documentPath, targetPath, options) {
+function dumpDocument(documentPath: string, targetPath: string, options: { password: any; includeDeleted: any; }) {
     const stats = {
         succeeded: 0,
         failed: 0,
@@ -19,14 +19,14 @@ function dumpDocument(documentPath, targetPath, options) {
     const dataKey = dataKeyService.getDataKey(options.password);
-    const existingPaths = {};
-    const noteIdToPath = {};
+    const existingPaths: Record<string, any> = {};
+    const noteIdToPath: Record<string, any> = {};
     dumpNote(targetPath, 'root');
     printDumpResults(stats, options);
-    function dumpNote(targetPath, noteId) {
+    function dumpNote(targetPath: any, noteId: any) {
         console.log(`Reading note '${noteId}'`);
         let childTargetPath, noteRow, fileNameWithPath;
@@ -94,7 +94,7 @@ function dumpDocument(documentPath, targetPath, options) {
             noteIdToPath[noteId] = childTargetPath;
         }
-        catch (e) {
+        catch (e: any) {
             console.error(`DUMPERROR: Writing '${noteId}' failed with error '${e.message}':\n${e.stack}`);
             stats.failed++;
@@ -108,9 +108,9 @@ function dumpDocument(documentPath, targetPath, options) {
             }
         }
         try {
-            fs.mkdirSync(childTargetPath, { recursive: true });
+            fs.mkdirSync(childTargetPath as string, { recursive: true });
         }
-        catch (e) {
+        catch (e: any) {
             console.error(`DUMPERROR: Creating directory ${childTargetPath} failed with error '${e.message}'`);
         }
@@ -121,7 +121,7 @@ function dumpDocument(documentPath, targetPath, options) {
         }
     }
 }
-function printDumpResults(stats, options) {
+function printDumpResults(stats: any, options: any) {
     console.log('\n----------------------- STATS -----------------------');
     console.log('Successfully dumpted notes: ', stats.succeeded.toString().padStart(5, ' '));
     console.log('Protected notes: ', stats.protected.toString().padStart(5, ' '), options.password ? '' : '(skipped)');
@@ -134,7 +134,7 @@ function printDumpResults(stats, options) {
         }
     }
 }
-function isContentEmpty(content) {
+function isContentEmpty(content: any) {
     if (!content) {
         return true;
     }
@@ -150,7 +150,7 @@ function isContentEmpty(content) {
         }
     }
 }
-function validatePaths(documentPath, targetPath) {
+function validatePaths(documentPath: string, targetPath: string) {
     if (!fs.existsSync(documentPath)) {
         console.error(`Path to document '${documentPath}' has not been found. Run with --help to see usage.`);
         process.exit(1);
@@ -166,6 +166,6 @@ function validatePaths(documentPath, targetPath) {
         }
     }
 }
-module.exports = {
+export default {
     dumpDocument
 };


@@ -1,7 +1,7 @@
-const path = require("path");
-const mimeTypes = require("mime-types");
+import path from "path";
+import mimeTypes from "mime-types";
-function getFileName(note, childTargetPath, safeTitle) {
+function getFileName(note: any, childTargetPath: string, safeTitle: string) {
     let existingExtension = path.extname(safeTitle).toLowerCase();
     let newExtension;
@@ -29,6 +29,6 @@ function getFileName(note, childTargetPath, safeTitle) {
     return fileNameWithPath;
 }
-module.exports = {
+export default {
     getFileName
 };


@@ -1,17 +0,0 @@
-const Database = require("better-sqlite3");
-let dbConnection;
-const openDatabase = (documentPath) => { dbConnection = new Database(documentPath, { readonly: true }) };
-const getRow = (query, params = []) => dbConnection.prepare(query).get(params);
-const getRows = (query, params = []) => dbConnection.prepare(query).all(params);
-const getValue = (query, params = []) => dbConnection.prepare(query).pluck().get(params);
-const getColumn = (query, params = []) => dbConnection.prepare(query).pluck().all(params);
-module.exports = {
-    openDatabase,
-    getRow,
-    getRows,
-    getValue,
-    getColumn
-};

dump-db/inc/sql.ts Normal file (18 changed lines)

@@ -0,0 +1,18 @@
+import Database, { Database as DatabaseType } from "better-sqlite3";
+let dbConnection: DatabaseType;
+const openDatabase = (documentPath: string) => { dbConnection = new Database(documentPath, { readonly: true }) };
+const getRow = (query: string, params: string[] = []): Record<string, any> => dbConnection.prepare(query).get(params) as Record<string, any>;
+const getRows = (query: string, params = []) => dbConnection.prepare(query).all(params);
+const getValue = (query: string, params: string[] = []) => dbConnection.prepare(query).pluck().get(params);
+const getColumn = (query: string, params: string[] = []) => dbConnection.prepare(query).pluck().all(params);
+export default {
+    openDatabase,
+    getRow,
+    getRows,
+    getValue,
+    getColumn
+};
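
The new module keeps the same helper surface as the deleted CommonJS version, now typed and default-exported. A minimal TypeScript usage sketch (the database path and queries are illustrative, not part of this commit):

import sql from './inc/sql.js';

// open the Trilium database read-only, then query it through the typed helpers
sql.openDatabase('/path/to/document.db'); // hypothetical path
const root = sql.getRow('SELECT * FROM notes WHERE noteId = ?', ['root']);
const title = sql.getValue('SELECT title FROM notes WHERE noteId = ?', ['root']);
const noteIds = sql.getColumn('SELECT noteId FROM notes');
console.log(root, title, noteIds.length);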

dump-db/package-lock.json generated (1513 changed lines)
File diff suppressed because it is too large


@@ -2,24 +2,30 @@
     "name": "dump-db",
     "version": "1.0.0",
     "description": "Standalone tool to dump contents of Trilium document.db file into a directory tree of notes",
-    "main": "dump-db.js",
+    "main": "dump-db.ts",
     "scripts": {
         "test": "echo \"Error: no test specified\" && exit 1"
     },
     "repository": {
         "type": "git",
-        "url": "git+https://github.com/zadam/trilium.git"
+        "url": "git+https://github.com/TriliumNext/Notes.git"
     },
-    "author": "zadam",
+    "author": "TriliumNext",
     "license": "ISC",
     "bugs": {
-        "url": "https://github.com/zadam/trilium/issues"
+        "url": "https://github.com/TriliumNext/Notes/issues"
     },
-    "homepage": "https://github.com/zadam/trilium/dump-db#readme",
+    "homepage": "https://github.com/TriliumNext/Notes/blob/master/dump-db/README.md",
     "dependencies": {
-        "better-sqlite3": "7.5.0",
-        "mime-types": "2.1.34",
-        "sanitize-filename": "1.6.3",
-        "yargs": "17.3.1"
+        "better-sqlite3": "^11.1.2",
+        "esrun": "^3.2.26",
+        "mime-types": "^2.1.34",
+        "sanitize-filename": "^1.6.3",
+        "yargs": "^17.3.1"
+    },
+    "devDependencies": {
+        "@types/better-sqlite3": "^7.6.11",
+        "@types/mime-types": "^2.1.4",
+        "@types/yargs": "^17.0.33"
     }
 }

dump-db/tsconfig.json Normal file (10 changed lines)

@@ -0,0 +1,10 @@
+{
+    "compilerOptions": {
+        "module": "ESNext",
+        "moduleResolution": "node",
+        "esModuleInterop": true,
+        "allowSyntheticDefaultImports": true,
+        "target": "ES6",
+        "strict": true
+    }
+}

package-lock.json generated (1 changed line)

@@ -15866,6 +15866,7 @@
         "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
         "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
         "dev": true,
+        "license": "MIT",
         "dependencies": {
             "@cspotcode/source-map-support": "^0.8.0",
             "@tsconfig/node10": "^1.0.7",


@@ -44,7 +44,8 @@
         "update-build-info": "tsx bin/update-build-info.ts",
         "errors": "tsc --watch --noEmit",
         "integration-edit-db": "cross-env TRILIUM_INTEGRATION_TEST=edit TRILIUM_PORT=8081 TRILIUM_DATA_DIR=./integration-tests/db nodemon src/www.ts",
-        "integration-mem-db": "cross-env TRILIUM_INTEGRATION_TEST=memory TRILIUM_PORT=8082 TRILIUM_DATA_DIR=./integration-tests/db nodemon src/www.ts"
+        "integration-mem-db": "cross-env TRILIUM_INTEGRATION_TEST=memory TRILIUM_PORT=8082 TRILIUM_DATA_DIR=./integration-tests/db nodemon src/www.ts",
+        "generate-document": "cross-env nodemon src/tools/generate_document.ts 1000"
     },
     "dependencies": {
         "@braintree/sanitize-url": "^7.1.0",


@@ -371,7 +371,7 @@ interface Api {
      * This object contains "at your risk" and "no BC guarantees" objects for advanced use cases.
      */
     __private: {
-        /** provides access to the backend in-memory object graph, see {@link https://github.com/zadam/trilium/blob/master/src/becca/becca.js} */
+        /** provides access to the backend in-memory object graph, see {@link Becca} */
         becca: Becca;
     };
 }


@@ -107,7 +107,7 @@ function getNewNoteTitle(parentNote: BNote) {
             // - now
             // - parentNote
-            title = eval(`\`${titleTemplate}\``);
+            title = (0, eval)(`\`${titleTemplate}\``);
         } catch (e: any) {
             log.error(`Title template of note '${parentNote.noteId}' failed with: ${e.message}`);
         }
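
Here and in the script executor below, the direct eval call becomes an indirect one: (0, eval)(...) still invokes the built-in eval, but as an indirect call it evaluates the string in the global scope rather than in the enclosing function's scope. A minimal TypeScript illustration of the difference (not Trilium code, just standard ECMAScript behaviour):

function direct(): string {
    const secret = "visible";
    // direct eval sees the enclosing local scope, so this returns "string"
    return eval("typeof secret");
}

function indirect(): string {
    // indirect eval runs in the global scope, where no "secret" binding exists,
    // so this returns "undefined"
    return (0, eval)("typeof secret");
}

console.log(direct(), indirect()); // "string" "undefined"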


@@ -99,7 +99,7 @@ function executeScript(script: string, params: ScriptParams, startNoteId: string
     }
 }
 function execute(ctx: ScriptContext, script: string) {
-    return function () { return eval(`const apiContext = this;\r\n(${script}\r\n)()`); }.call(ctx);
+    return function () { return (0, eval)(`const apiContext = this;\r\n(${script}\r\n)()`); }.call(ctx);
 }
 function getParams(params?: ScriptParams) {


@@ -3,13 +3,13 @@
  * will create 1000 new notes and some clones into the current document.db
  */
-require('../becca/entity_constructor');
-const sqlInit = require('../services/sql_init');
-const noteService = require('../services/notes');
-const attributeService = require('../services/attributes');
-const cls = require('../services/cls');
-const cloningService = require('../services/cloning');
-const loremIpsum = require('lorem-ipsum').loremIpsum;
+import sqlInit from '../services/sql_init.js';
+import noteService from '../services/notes.js';
+import attributeService from '../services/attributes.js';
+import cls from '../services/cls.js';
+import cloningService from '../services/cloning.js';
+import loremIpsum from 'lorem-ipsum';
+import '../becca/entity_constructor.js';
 const noteCount = parseInt(process.argv[2]);
@@ -28,7 +28,7 @@ function getRandomNoteId() {
 async function start() {
     for (let i = 0; i < noteCount; i++) {
-        const title = loremIpsum({
+        const title = loremIpsum.loremIpsum({
             count: 1,
             units: 'sentences',
             sentenceLowerBound: 1,
@@ -36,7 +36,7 @@ async function start() {
         });
         const paragraphCount = Math.floor(Math.random() * Math.random() * 100);
-        const content = loremIpsum({
+        const content = loremIpsum.loremIpsum({
             count: paragraphCount,
             units: 'paragraphs',
             sentenceLowerBound: 1,
@@ -46,7 +46,7 @@ async function start() {
             format: 'html'
         });
-        const {note} = noteService.createNewNote({
+        const { note } = noteService.createNewNote({
             parentNoteId: getRandomNoteId(),
             title,
             content,
@@ -58,7 +58,7 @@ async function start() {
         if (Math.random() < 0.04) {
             const noteIdToClone = note.noteId;
             const parentNoteId = getRandomNoteId();
-            const prefix = Math.random() > 0.8 ? "prefix" : null;
+            const prefix = Math.random() > 0.8 ? "prefix" : '';
             const result = await cloningService.cloneNoteToBranch(noteIdToClone, parentNoteId, prefix);
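
With the default import, loremIpsum is bound to the whole module object, which is why the call sites above become loremIpsum.loremIpsum(...). A short TypeScript sketch of the two equivalent ways to reach the generator (illustrative only, assuming the package's named export):

import loremIpsum from 'lorem-ipsum';                   // module object: call loremIpsum.loremIpsum(...)
import { loremIpsum as generate } from 'lorem-ipsum';   // or bind the named export directly

const sentence = loremIpsum.loremIpsum({ count: 1, units: 'sentences' });
const paragraph = generate({ count: 1, units: 'paragraphs', format: 'html' });
console.log(sentence, paragraph);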


@@ -14,7 +14,7 @@
     },
     "include": [
         "./src/**/*.js",
         "./src/**/*.ts",
         "./*.ts",
         "./spec/**/*.ts",
         "./spec-es6/**/*.ts"