server-ts: Convert more classes, including entity_changes.js

Elian Doran 2024-02-16 23:56:32 +02:00
parent cb14d4d8f9
commit e04bd5aaf0
10 changed files with 238 additions and 228 deletions

View File

@@ -6,4 +6,4 @@ class NotFoundError {
}
}
module.exports = NotFoundError;
export = NotFoundError;
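
Every converted file in this commit swaps CommonJS module.exports for TypeScript's export =, which pairs with the import ... = require(...) form already used at the top of these modules. A minimal consumer sketch (hypothetical caller, not part of this commit) of the converted error class:

// Hypothetical sketch: with `export = NotFoundError`, consumers use TypeScript's
// import-equals syntax rather than a default or named import.
import NotFoundError = require('../errors/not_found_error');

function requireEntity<T>(entity: T | null, entityId: string): T {
    if (!entity) {
        // NotFoundError is assumed to accept a message string, as used elsewhere in the codebase
        throw new NotFoundError(`Entity '${entityId}' was not found.`);
    }
    return entity;
}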

View File

@@ -0,0 +1,5 @@
export interface Blob {
blobId: string;
content: Buffer;
utcDateModified: string;
}

View File

@@ -2,6 +2,7 @@ import becca = require('../becca/becca.js');
import NotFoundError = require('../errors/not_found_error');
import protectedSessionService = require('./protected_session');
import utils = require('./utils');
import type { Blob } from "./blob-interface";
function getBlobPojo(entityName: string, entityId: string) {
const entity = becca.getEntity(entityName, entityId);
@@ -48,11 +49,11 @@ function processContent(content: Buffer | string | null, isProtected: boolean, i
}
}
function calculateContentHash({blobId, content}: { blobId: string, content: Buffer }) {
function calculateContentHash({blobId, content}: Blob) {
return utils.hash(`${blobId}|${content.toString()}`);
}
module.exports = {
export = {
getBlobPojo,
processContent,
calculateContentHash
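
Because calculateContentHash now takes the shared Blob interface (added in blob-interface.ts above), a row selected from the blobs table can be handed to it directly, which is exactly what the rewritten entity_changes.ts does further down. A small sketch of that typed call path (the blobId value is illustrative):

// Sketch of the typed call path: sql.getRow<Blob>() yields a Blob-shaped row,
// which calculateContentHash now accepts as-is.
import sql = require('./sql');
import blobService = require('../services/blob.js');
import type { Blob } from "./blob-interface";

const blob = sql.getRow<Blob>(
    "SELECT blobId, content, utcDateModified FROM blobs WHERE blobId = ?",
    ["abc123"] // illustrative blobId
);
const hash = blobService.calculateContentHash(blob); // hashes `${blobId}|${content}`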

View File

@@ -95,7 +95,7 @@ function validateUtcDateTime(str: string) {
}
}
module.exports = {
export = {
utcNowDateTime,
localNowDateTime,
localNowDate,

View File

@@ -1,202 +0,0 @@
const sql = require('./sql');
const dateUtils = require('./date_utils');
const log = require('./log');
const cls = require('./cls');
const utils = require('./utils');
const instanceId = require('./instance_id');
const becca = require('../becca/becca.js');
const blobService = require('../services/blob.js');
let maxEntityChangeId = 0;
function putEntityChangeWithInstanceId(origEntityChange, instanceId) {
const ec = {...origEntityChange, instanceId};
putEntityChange(ec);
}
function putEntityChangeWithForcedChange(origEntityChange) {
const ec = {...origEntityChange, changeId: null};
putEntityChange(ec);
}
function putEntityChange(origEntityChange) {
const ec = {...origEntityChange};
delete ec.id;
if (!ec.changeId) {
ec.changeId = utils.randomString(12);
}
ec.componentId = ec.componentId || cls.getComponentId() || "NA"; // NA = not available
ec.instanceId = ec.instanceId || instanceId;
ec.isSynced = ec.isSynced ? 1 : 0;
ec.isErased = ec.isErased ? 1 : 0;
ec.id = sql.replace("entity_changes", ec);
maxEntityChangeId = Math.max(maxEntityChangeId, ec.id);
cls.putEntityChange(ec);
}
function putNoteReorderingEntityChange(parentNoteId, componentId) {
putEntityChange({
entityName: "note_reordering",
entityId: parentNoteId,
hash: 'N/A',
isErased: false,
utcDateChanged: dateUtils.utcNowDateTime(),
isSynced: true,
componentId,
instanceId
});
const eventService = require('./events');
eventService.emit(eventService.ENTITY_CHANGED, {
entityName: 'note_reordering',
entity: sql.getMap(`SELECT branchId, notePosition FROM branches WHERE isDeleted = 0 AND parentNoteId = ?`, [parentNoteId])
});
}
function putEntityChangeForOtherInstances(ec) {
putEntityChange({
...ec,
changeId: null,
instanceId: null
});
}
function addEntityChangesForSector(entityName, sector) {
const entityChanges = sql.getRows(`SELECT * FROM entity_changes WHERE entityName = ? AND SUBSTR(entityId, 1, 1) = ?`, [entityName, sector]);
let entitiesInserted = entityChanges.length;
sql.transactional(() => {
if (entityName === 'blobs') {
entitiesInserted += addEntityChangesForDependingEntity(sector, 'notes', 'noteId');
entitiesInserted += addEntityChangesForDependingEntity(sector, 'attachments', 'attachmentId');
entitiesInserted += addEntityChangesForDependingEntity(sector, 'revisions', 'revisionId');
}
for (const ec of entityChanges) {
putEntityChangeWithForcedChange(ec);
}
});
log.info(`Added sector ${sector} of '${entityName}' (${entitiesInserted} entities) to the sync queue.`);
}
function addEntityChangesForDependingEntity(sector, tableName, primaryKeyColumn) {
// problem in blobs might be caused by problem in entity referencing the blob
const dependingEntityChanges = sql.getRows(`
SELECT dep_change.*
FROM entity_changes orig_sector
JOIN ${tableName} ON ${tableName}.blobId = orig_sector.entityId
JOIN entity_changes dep_change ON dep_change.entityName = '${tableName}' AND dep_change.entityId = ${tableName}.${primaryKeyColumn}
WHERE orig_sector.entityName = 'blobs' AND SUBSTR(orig_sector.entityId, 1, 1) = ?`, [sector]);
for (const ec of dependingEntityChanges) {
putEntityChangeWithForcedChange(ec);
}
return dependingEntityChanges.length;
}
function cleanupEntityChangesForMissingEntities(entityName, entityPrimaryKey) {
sql.execute(`
DELETE
FROM entity_changes
WHERE
isErased = 0
AND entityName = '${entityName}'
AND entityId NOT IN (SELECT ${entityPrimaryKey} FROM ${entityName})`);
}
function fillEntityChanges(entityName, entityPrimaryKey, condition = '') {
cleanupEntityChangesForMissingEntities(entityName, entityPrimaryKey);
sql.transactional(() => {
const entityIds = sql.getColumn(`SELECT ${entityPrimaryKey} FROM ${entityName} ${condition}`);
let createdCount = 0;
for (const entityId of entityIds) {
const existingRows = sql.getValue("SELECT COUNT(1) FROM entity_changes WHERE entityName = ? AND entityId = ?", [entityName, entityId]);
if (existingRows !== 0) {
// we don't want to replace existing entities (which would effectively cause full resync)
continue;
}
createdCount++;
const ec = {
entityName,
entityId,
isErased: false
};
if (entityName === 'blobs') {
const blob = sql.getRow("SELECT blobId, content, utcDateModified FROM blobs WHERE blobId = ?", [entityId]);
ec.hash = blobService.calculateContentHash(blob);
ec.utcDateChanged = blob.utcDateModified;
ec.isSynced = true; // blobs are always synced
} else {
const entity = becca.getEntity(entityName, entityId);
if (entity) {
ec.hash = entity.generateHash();
ec.utcDateChanged = entity.getUtcDateChanged() || dateUtils.utcNowDateTime();
ec.isSynced = entityName !== 'options' || !!entity.isSynced;
} else {
// entity might be null (not present in becca) when it's deleted
// this will produce different hash value than when entity is being deleted since then
// all normal hashed attributes are being used. Sync should recover from that, though.
ec.hash = "deleted";
ec.utcDateChanged = dateUtils.utcNowDateTime();
ec.isSynced = true; // deletable (the ones with isDeleted) entities are synced
}
}
putEntityChange(ec);
}
if (createdCount > 0) {
log.info(`Created ${createdCount} missing entity changes for entity '${entityName}'.`);
}
});
}
function fillAllEntityChanges() {
sql.transactional(() => {
sql.execute("DELETE FROM entity_changes WHERE isErased = 0");
fillEntityChanges("notes", "noteId");
fillEntityChanges("branches", "branchId");
fillEntityChanges("revisions", "revisionId");
fillEntityChanges("attachments", "attachmentId");
fillEntityChanges("blobs", "blobId");
fillEntityChanges("attributes", "attributeId");
fillEntityChanges("etapi_tokens", "etapiTokenId");
fillEntityChanges("options", "name", 'WHERE isSynced = 1');
});
}
function recalculateMaxEntityChangeId() {
maxEntityChangeId = sql.getValue("SELECT COALESCE(MAX(id), 0) FROM entity_changes");
}
module.exports = {
putNoteReorderingEntityChange,
putEntityChangeForOtherInstances,
putEntityChangeWithForcedChange,
putEntityChange,
putEntityChangeWithInstanceId,
fillAllEntityChanges,
addEntityChangesForSector,
getMaxEntityChangeId: () => maxEntityChangeId,
recalculateMaxEntityChangeId
};
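
The sector functions above drive partial re-syncs: an entity's sector is the first character of its ID (the SUBSTR(entityId, 1, 1) predicate), and for blobs the depending notes, attachments and revisions are re-queued as well. A hedged usage sketch (the sector value is illustrative, and the import path assumes this module, which is re-created in TypeScript below):

// Illustrative only: re-queue one sector for sync. The sector is the first character of
// the entityId, so this re-queues entity_changes rows whose entityId starts with "a";
// for 'blobs' it also re-queues the notes/attachments/revisions that reference those blobs.
import entityChangesService = require('./entity_changes');

entityChangesService.addEntityChangesForSector('blobs', 'a');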

View File

@@ -1,12 +1,206 @@
export interface EntityChange {
id?: string;
entityName: string;
entityId: string;
hash: string;
utcDateChanged: string;
isSynced: boolean | 1 | 0;
isErased: boolean | 1 | 0;
componentId?: string | null;
changeId?: string | null;
instanceId?: string | null;
}
import sql = require('./sql');
import dateUtils = require('./date_utils');
import log = require('./log');
import cls = require('./cls');
import utils = require('./utils');
import instanceId = require('./instance_id');
import becca = require('../becca/becca.js');
import blobService = require('../services/blob.js');
import { EntityChange } from './entity_changes_interface';
import type { Blob } from "./blob-interface";
let maxEntityChangeId = 0;
function putEntityChangeWithInstanceId(origEntityChange: EntityChange, instanceId: string) {
const ec = {...origEntityChange, instanceId};
putEntityChange(ec);
}
function putEntityChangeWithForcedChange(origEntityChange: EntityChange) {
const ec = {...origEntityChange, changeId: null};
putEntityChange(ec);
}
function putEntityChange(origEntityChange: EntityChange) {
const ec = {...origEntityChange};
delete ec.id;
if (!ec.changeId) {
ec.changeId = utils.randomString(12);
}
ec.componentId = ec.componentId || cls.getComponentId() || "NA"; // NA = not available
ec.instanceId = ec.instanceId || instanceId;
ec.isSynced = ec.isSynced ? 1 : 0;
ec.isErased = ec.isErased ? 1 : 0;
ec.id = sql.replace("entity_changes", ec);
if (ec.id) {
maxEntityChangeId = Math.max(maxEntityChangeId, ec.id);
}
cls.putEntityChange(ec);
}
function putNoteReorderingEntityChange(parentNoteId: string, componentId: string) {
putEntityChange({
entityName: "note_reordering",
entityId: parentNoteId,
hash: 'N/A',
isErased: false,
utcDateChanged: dateUtils.utcNowDateTime(),
isSynced: true,
componentId,
instanceId
});
const eventService = require('./events');
eventService.emit(eventService.ENTITY_CHANGED, {
entityName: 'note_reordering',
entity: sql.getMap(`SELECT branchId, notePosition FROM branches WHERE isDeleted = 0 AND parentNoteId = ?`, [parentNoteId])
});
}
function putEntityChangeForOtherInstances(ec: EntityChange) {
putEntityChange({
...ec,
changeId: null,
instanceId: null
});
}
function addEntityChangesForSector(entityName: string, sector: string) {
const entityChanges = sql.getRows<EntityChange>(`SELECT * FROM entity_changes WHERE entityName = ? AND SUBSTR(entityId, 1, 1) = ?`, [entityName, sector]);
let entitiesInserted = entityChanges.length;
sql.transactional(() => {
if (entityName === 'blobs') {
entitiesInserted += addEntityChangesForDependingEntity(sector, 'notes', 'noteId');
entitiesInserted += addEntityChangesForDependingEntity(sector, 'attachments', 'attachmentId');
entitiesInserted += addEntityChangesForDependingEntity(sector, 'revisions', 'revisionId');
}
for (const ec of entityChanges) {
putEntityChangeWithForcedChange(ec);
}
});
log.info(`Added sector ${sector} of '${entityName}' (${entitiesInserted} entities) to the sync queue.`);
}
function addEntityChangesForDependingEntity(sector: string, tableName: string, primaryKeyColumn: string) {
// problem in blobs might be caused by problem in entity referencing the blob
const dependingEntityChanges = sql.getRows<EntityChange>(`
SELECT dep_change.*
FROM entity_changes orig_sector
JOIN ${tableName} ON ${tableName}.blobId = orig_sector.entityId
JOIN entity_changes dep_change ON dep_change.entityName = '${tableName}' AND dep_change.entityId = ${tableName}.${primaryKeyColumn}
WHERE orig_sector.entityName = 'blobs' AND SUBSTR(orig_sector.entityId, 1, 1) = ?`, [sector]);
for (const ec of dependingEntityChanges) {
putEntityChangeWithForcedChange(ec);
}
return dependingEntityChanges.length;
}
function cleanupEntityChangesForMissingEntities(entityName: string, entityPrimaryKey: string) {
sql.execute(`
DELETE
FROM entity_changes
WHERE
isErased = 0
AND entityName = '${entityName}'
AND entityId NOT IN (SELECT ${entityPrimaryKey} FROM ${entityName})`);
}
function fillEntityChanges(entityName: string, entityPrimaryKey: string, condition = '') {
cleanupEntityChangesForMissingEntities(entityName, entityPrimaryKey);
sql.transactional(() => {
const entityIds = sql.getColumn<string>(`SELECT ${entityPrimaryKey} FROM ${entityName} ${condition}`);
let createdCount = 0;
for (const entityId of entityIds) {
const existingRows = sql.getValue("SELECT COUNT(1) FROM entity_changes WHERE entityName = ? AND entityId = ?", [entityName, entityId]);
if (existingRows !== 0) {
// we don't want to replace existing entities (which would effectively cause full resync)
continue;
}
createdCount++;
const ec: Partial<EntityChange> = {
entityName,
entityId,
isErased: false
};
if (entityName === 'blobs') {
const blob = sql.getRow<Blob>("SELECT blobId, content, utcDateModified FROM blobs WHERE blobId = ?", [entityId]);
ec.hash = blobService.calculateContentHash(blob);
ec.utcDateChanged = blob.utcDateModified;
ec.isSynced = true; // blobs are always synced
} else {
const entity = becca.getEntity(entityName, entityId);
if (entity) {
ec.hash = entity.generateHash();
ec.utcDateChanged = entity.getUtcDateChanged() || dateUtils.utcNowDateTime();
ec.isSynced = entityName !== 'options' || !!entity.isSynced;
} else {
// entity might be null (not present in becca) when it's deleted
// this will produce different hash value than when entity is being deleted since then
// all normal hashed attributes are being used. Sync should recover from that, though.
ec.hash = "deleted";
ec.utcDateChanged = dateUtils.utcNowDateTime();
ec.isSynced = true; // deletable (the ones with isDeleted) entities are synced
}
}
putEntityChange(ec as EntityChange);
}
if (createdCount > 0) {
log.info(`Created ${createdCount} missing entity changes for entity '${entityName}'.`);
}
});
}
function fillAllEntityChanges() {
sql.transactional(() => {
sql.execute("DELETE FROM entity_changes WHERE isErased = 0");
fillEntityChanges("notes", "noteId");
fillEntityChanges("branches", "branchId");
fillEntityChanges("revisions", "revisionId");
fillEntityChanges("attachments", "attachmentId");
fillEntityChanges("blobs", "blobId");
fillEntityChanges("attributes", "attributeId");
fillEntityChanges("etapi_tokens", "etapiTokenId");
fillEntityChanges("options", "name", 'WHERE isSynced = 1');
});
}
function recalculateMaxEntityChangeId() {
maxEntityChangeId = sql.getValue<number>("SELECT COALESCE(MAX(id), 0) FROM entity_changes");
}
export = {
putNoteReorderingEntityChange,
putEntityChangeForOtherInstances,
putEntityChangeWithForcedChange,
putEntityChange,
putEntityChangeWithInstanceId,
fillAllEntityChanges,
addEntityChangesForSector,
getMaxEntityChangeId: () => maxEntityChangeId,
recalculateMaxEntityChangeId
};
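
A caller of the converted module typically hands over a partially-filled record and lets putEntityChange complete it: changeId, componentId and instanceId are defaulted, and the boolean flags are stored as 0/1, which is why the EntityChange interface below allows boolean | 1 | 0. An illustrative sketch (entity values are made up):

// Illustrative sketch using the module above: booleans are accepted on input and
// persisted as 0/1; changeId/componentId/instanceId are filled in by putEntityChange.
import entityChangesService = require('./entity_changes');
import dateUtils = require('./date_utils');

entityChangesService.putEntityChange({
    entityName: "notes",
    entityId: "abc123",                          // illustrative noteId
    hash: "some-content-hash",                   // illustrative hash
    utcDateChanged: dateUtils.utcNowDateTime(),
    isSynced: true,                              // stored as 1
    isErased: false                              // stored as 0
});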

View File

@@ -0,0 +1,12 @@
export interface EntityChange {
id?: number | null;
entityName: string;
entityId: string;
hash: string;
utcDateChanged: string;
isSynced: boolean | 1 | 0;
isErased: boolean | 1 | 0;
componentId?: string | null;
changeId?: string | null;
instanceId?: string | null;
}
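
For reference, a fully-populated record satisfying this interface might look as follows; all values are illustrative, and the 0/1 flag form shown here is what putEntityChange persists, while callers may pass plain booleans:

// Illustrative only: an EntityChange roughly as it might look after being persisted,
// with a numeric row id and the 0/1 flag representation.
const example: EntityChange = {
    id: 42,                         // numeric row id (illustrative)
    entityName: "notes",
    entityId: "abc123",             // illustrative
    hash: "some-content-hash",      // illustrative
    utcDateChanged: "2024-02-16 21:56:32.000Z",   // illustrative timestamp
    isSynced: 1,
    isErased: 0,
    componentId: "NA",
    changeId: "q1w2e3r4t5y6",       // illustrative 12-character random string
    instanceId: "a1b2c3d4e5f6"      // illustrative instance id
};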

View File

@@ -2,4 +2,4 @@ import utils = require('./utils');
const instanceId = utils.randomString(12);
module.exports = instanceId;
export = instanceId;

View File

@@ -30,16 +30,16 @@ function encrypt(plainText: string | Buffer) {
return dataEncryptionService.encrypt(dataKey, plainText);
}
function decrypt(cipherText: string | Buffer) {
function decrypt(cipherText: string | Buffer): Buffer | null {
const dataKey = getDataKey();
if (cipherText === null || dataKey === null) {
return null;
}
return dataEncryptionService.decrypt(dataKey, cipherText);
return dataEncryptionService.decrypt(dataKey, cipherText) || null;
}
function decryptString(cipherText: string) {
function decryptString(cipherText: string): string | null {
const dataKey = getDataKey();
if (dataKey === null) {
return null;
@@ -70,7 +70,7 @@ function checkProtectedSessionExpiration() {
}
}
module.exports = {
export = {
setDataKey,
resetDataKey,
isProtectedSessionAvailable,
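
With decrypt and decryptString now declared to return Buffer | null and string | null respectively, callers have to handle the null case (no data key, i.e. no protected session) explicitly. A hedged sketch of a hypothetical caller, assuming decrypt is among the exports truncated above:

// Hypothetical caller sketch: the narrowed return type forces an explicit null check
// before the decrypted Buffer can be used.
import protectedSessionService = require('./protected_session');

function readProtectedContent(cipherText: Buffer): string {
    const decrypted = protectedSessionService.decrypt(cipherText);   // Buffer | null
    if (decrypted === null) {
        throw new Error("Protected session not available or decryption failed.");
    }
    return decrypted.toString("utf-8");
}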

View File

@@ -103,8 +103,8 @@ function getRowOrNull<T>(query: string, params: Params = []): T | null {
return (all.length > 0 ? all[0] : null) as (T | null);
}
function getValue(query: string, params: Params = []) {
return wrap(query, s => s.pluck().get(params));
function getValue<T>(query: string, params: Params = []): T {
return wrap(query, s => s.pluck().get(params)) as T;
}
// smaller values can result in better performance due to better usage of statement cache
@@ -139,8 +139,8 @@ function getManyRows(query: string, params: Params) {
return results;
}
function getRows(query: string, params: Params = []): unknown[] {
return wrap(query, s => s.all(params)) as unknown[];
function getRows<T>(query: string, params: Params = []): T[] {
return wrap(query, s => s.all(params)) as T[];
}
function getRawRows<T extends {} | unknown[]>(query: string, params: Params = []): T[] | null {
@@ -166,8 +166,8 @@ function getMap<K extends string | number | symbol, V>(query: string, params: Pa
return map;
}
function getColumn(query: string, params: Params = []) {
return wrap(query, s => s.pluck().all(params));
function getColumn<T>(query: string, params: Params = []): T[] {
return wrap(query, s => s.pluck().all(params)) as T[];
}
function execute(query: string, params: Params = []): RunResult {
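
The generic signatures above let call sites declare their result types instead of casting from unknown; the entity_changes conversion in this commit relies on them. A sketch of the resulting call pattern (queries are illustrative, modeled on ones appearing elsewhere in this diff):

// Sketch of typed call sites enabled by the generics above.
import sql = require('./sql');

const maxId = sql.getValue<number>("SELECT COALESCE(MAX(id), 0) FROM entity_changes");
const noteIds = sql.getColumn<string>("SELECT noteId FROM notes");
const changes = sql.getRows<{ entityName: string, entityId: string }>(
    "SELECT entityName, entityId FROM entity_changes WHERE isSynced = 1");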