server-ts: Port services/content_hash
commit 43c89c0e9d
parent 0ba80b176c
@@ -6,7 +6,7 @@ const entityChangesService = require('../../services/entity_changes');
 const sql = require('../../services/sql');
 const sqlInit = require('../../services/sql_init');
 const optionService = require('../../services/options');
-const contentHashService = require('../../services/content_hash.js');
+const contentHashService = require('../../services/content_hash');
 const log = require('../../services/log');
 const syncOptions = require('../../services/sync_options');
 const utils = require('../../services/utils');
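The only change on this consumer's side is dropping the `.js` extension from the require path, so module resolution picks up whatever the ported service compiles to. A minimal sketch of the call site, assuming the route code stays plain CommonJS and uses the getEntityHashes export shown further down in this commit:

// Sketch only: usage assumed from the diff context, not a verbatim excerpt.
const contentHashService = require('../../services/content_hash');
const hashes = contentHashService.getEntityHashes();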
@@ -1,9 +1,11 @@
 "use strict";
 
-const sql = require('./sql');
-const utils = require('./utils');
-const log = require('./log');
-const eraseService = require('./erase');
+import sql = require('./sql');
+import utils = require('./utils');
+import log = require('./log');
+import eraseService = require('./erase');
+
+type SectorHash = Record<string, string>;
 
 function getEntityHashes() {
     // blob erasure is not synced, we should check before each sync if there's some blob to erase
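The CommonJS requires become TypeScript's `import ... = require(...)`, which keeps CommonJS module semantics but lets the compiler type-check the imports; a `SectorHash` alias is also introduced for a sector-to-hash map. A minimal self-contained sketch of the pattern (module names here are hypothetical, not part of the commit); without esModuleInterop, `import = require` is the standard way to consume a module that itself uses `export =`:

// counter.ts (hypothetical module using export =, mirroring this file's own style)
let count = 0;
export = {
    increment() { return ++count; }
};

// consumer.ts (hypothetical)
import counter = require('./counter');     // typed, CommonJS-compatible import
counter.increment();

type SectorHash = Record<string, string>;  // sector key -> hash for that sector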
@@ -12,8 +14,9 @@ function getEntityHashes() {
     const startTime = new Date();
 
     // we know this is slow and the total content hash calculation time is logged
+    type HashRow = [ string, string, string, boolean ];
     const hashRows = sql.disableSlowQueryLogging(
-        () => sql.getRawRows(`
+        () => sql.getRawRows<HashRow>(`
                 SELECT entityName,
                        entityId,
                        hash,
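Declaring `HashRow` as a tuple lets the destructuring later in the function stay fully typed; `sql.getRawRows<HashRow>` is assumed here to simply pass that tuple type through as its row type. A sketch of the idea with a hypothetical stand-in for the query helper:

// fetchRows is a hypothetical stand-in for sql.getRawRows, for illustration only.
type HashRow = [ string, string, string, boolean ];

function fetchRows<T>(rows: unknown[]): T[] {
    return rows as T[];  // in the real service the rows come from SQLite
}

const hashRows = fetchRows<HashRow>([["notes", "abc123", "deadbeef", false]]);
for (const [entityName, entityId, hash, isErased] of hashRows) {
    // entityName/entityId/hash are inferred as string, isErased as boolean
    console.log(entityName, entityId, hash, isErased);
}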
@@ -27,7 +30,7 @@ function getEntityHashes() {
     // sorting by entityId is enough, hashes will be segmented by entityName later on anyway
     hashRows.sort((a, b) => a[1] < b[1] ? -1 : 1);
 
-    const hashMap = {};
+    const hashMap: Record<string, SectorHash> = {};
 
     for (const [entityName, entityId, hash, isErased] of hashRows) {
         const entityHashMap = hashMap[entityName] = hashMap[entityName] || {};
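Typing `hashMap` as `Record<string, SectorHash>` makes the two-level structure explicit: entity name -> sector -> hash. A small sketch of how such a map is filled, under the assumption (not visible in this hunk) that the sector key is derived from the first character of the entityId:

type SectorHash = Record<string, string>;

const hashMap: Record<string, SectorHash> = {};
const rows: [string, string, string][] = [["notes", "abc", "h1"], ["notes", "bcd", "h2"]];

for (const [entityName, entityId, hash] of rows) {
    const entityHashMap = hashMap[entityName] = hashMap[entityName] || {};
    const sector = entityId[0];                              // assumption: sector = first char of entityId
    entityHashMap[sector] = (entityHashMap[sector] || "") + hash;  // accumulate per sector
}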
@@ -51,13 +54,13 @@ function getEntityHashes() {
     return hashMap;
 }
 
-function checkContentHashes(otherHashes) {
+function checkContentHashes(otherHashes: Record<string, SectorHash>) {
     const entityHashes = getEntityHashes();
     const failedChecks = [];
 
     for (const entityName in entityHashes) {
-        const thisSectorHashes = entityHashes[entityName] || {};
-        const otherSectorHashes = otherHashes[entityName] || {};
+        const thisSectorHashes: SectorHash = entityHashes[entityName] || {};
+        const otherSectorHashes: SectorHash = otherHashes[entityName] || {};
 
         const sectors = new Set(Object.keys(thisSectorHashes).concat(Object.keys(otherSectorHashes)));
 
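With both sides typed as `Record<string, SectorHash>`, the comparison loop can walk the union of sector keys for each entity and record any mismatch. A minimal sketch of that comparison under the same typing (function name and failed-check shape are illustrative, not a verbatim copy of the service):

type SectorHash = Record<string, string>;

function diffSectors(ours: Record<string, SectorHash>, theirs: Record<string, SectorHash>) {
    const failedChecks: { entityName: string, sector: string }[] = [];

    for (const entityName in ours) {
        const thisSectorHashes: SectorHash = ours[entityName] || {};
        const otherSectorHashes: SectorHash = theirs[entityName] || {};
        // union of sector keys from both sides, mirroring the diff above
        const sectors = new Set(Object.keys(thisSectorHashes).concat(Object.keys(otherSectorHashes)));

        for (const sector of sectors) {
            if (thisSectorHashes[sector] !== otherSectorHashes[sector]) {
                failedChecks.push({ entityName, sector });
            }
        }
    }

    return failedChecks;
}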
@@ -77,7 +80,7 @@ function checkContentHashes(otherHashes) {
     return failedChecks;
 }
 
-module.exports = {
+export = {
     getEntityHashes,
     checkContentHashes
 };
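`module.exports = {...}` becomes `export = {...}`, the TypeScript equivalent for a module consumed via `require()` or `import = require()`; the exported surface (getEntityHashes, checkContentHashes) is unchanged, so existing JavaScript callers keep working while TypeScript callers get types. A hedged sketch of both consumer styles (file name assumed):

// hypothetical TypeScript consumer
import contentHashService = require('./content_hash');
const failed = contentHashService.checkContentHashes({});  // parameter typed as Record<string, SectorHash>

// existing plain-JavaScript consumers keep the old form:
// const contentHashService = require('./content_hash');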
@@ -7,7 +7,7 @@ const utils = require('./utils');
 const instanceId = require('./instance_id');
 const dateUtils = require('./date_utils');
 const syncUpdateService = require('./sync_update');
-const contentHashService = require('./content_hash.js');
+const contentHashService = require('./content_hash');
 const appInfo = require('./app_info');
 const syncOptions = require('./sync_options');
 const syncMutexService = require('./sync_mutex');