Mirror of https://github.com/zadam/trilium.git (synced 2025-03-01 14:22:32 +01:00)
fix incorrect import of relations from tar

(cherry picked from commit 8d14a0d687f195ea24542f78091f3c1c5fd34974)

parent adae0625b9
commit 3eebce22e7

BIN  db/demo.tar (binary file not shown)
@@ -1,6 +1,5 @@
 "use strict";
 
-
 const Attribute = require('../../entities/attribute');
 const utils = require('../../services/utils');
 const log = require('../../services/log');
@@ -114,28 +113,18 @@ async function importTar(importContext, fileBuffer, importRootNote) {
 }
 
 function getNoteId(noteMeta, filePath) {
-    let noteId;
-
     const filePathNoExt = getTextFileWithoutExtension(filePath);
 
-    if (noteMeta) {
-        if (filePathNoExt in createdPaths) {
-            noteId = createdPaths[filePathNoExt];
-            noteIdMap[noteMeta.noteId] = noteId;
-        }
-        else {
-            noteId = getNewNoteId(noteMeta.noteId);
-        }
-    }
-    else {
-        if (filePathNoExt in createdPaths) {
-            noteId = createdPaths[filePathNoExt];
-        }
-        else {
-            noteId = utils.newEntityId();
-        }
+    console.log(`Searching for noteId of filePath ${filePath} with meta: ${!!noteMeta}`);
+
+    if (filePathNoExt in createdPaths) {
+        console.log("Found existing path", filePathNoExt, createdPaths[filePathNoExt]);
+
+        return createdPaths[filePathNoExt];
     }
 
+    const noteId = noteMeta ? getNewNoteId(noteMeta.noteId) : utils.newEntityId();
+
     createdPaths[filePathNoExt] = noteId;
 
     return noteId;
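
Reassembled from the added lines above, the replacement getNoteId reads as follows (the console.log calls are the temporary debug output this commit ships with). The behavioural change is that a path which has already been imported now always resolves to the previously created noteId, instead of branching on whether tar metadata is present:

    function getNoteId(noteMeta, filePath) {
        const filePathNoExt = getTextFileWithoutExtension(filePath);

        console.log(`Searching for noteId of filePath ${filePath} with meta: ${!!noteMeta}`);

        // a path that was already imported keeps its original noteId
        if (filePathNoExt in createdPaths) {
            console.log("Found existing path", filePathNoExt, createdPaths[filePathNoExt]);

            return createdPaths[filePathNoExt];
        }

        // otherwise derive the id from the tar metadata when present, or generate a fresh one
        const noteId = noteMeta ? getNewNoteId(noteMeta.noteId) : utils.newEntityId();

        createdPaths[filePathNoExt] = noteId;

        return noteId;
    }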
@@ -234,7 +223,8 @@ async function importTar(importContext, fileBuffer, importRootNote) {
 
         absUrl += (absUrl.length > 0 ? '/' : '') + url;
 
-        const targetNoteId = getNoteId(null, absUrl);
+        const {noteMeta} = getMeta(absUrl);
+        const targetNoteId = getNoteId(noteMeta, absUrl);
         return targetNoteId;
     }
 
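
A minimal sketch of why this matters when relative links inside imported content are resolved; the path is a made-up example, and getMeta/getNoteId are the helpers touched above. Before the fix the target id was produced with getNoteId(null, absUrl), i.e. without consulting the export metadata, so the id picked for a link or relation target and the id the target note itself is created under (derived from its metadata) could disagree:

    // hypothetical relative target resolved to an archive path
    const absUrl = 'Some Folder/Target Note.html';

    // look up the metadata entry for that path (may be missing for foreign files)
    const {noteMeta} = getMeta(absUrl);

    // the target now receives the same noteId that the imported note gets
    const targetNoteId = getNoteId(noteMeta, absUrl);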
@@ -340,22 +330,6 @@ async function importTar(importContext, fileBuffer, importRootNote) {
 
         await saveAttributes(note, noteMeta);
 
-        if (!noteMeta && (type === 'file' || type === 'image')) {
-            attributes.push({
-                noteId,
-                type: 'label',
-                name: 'originalFileName',
-                value: path.basename(filePath)
-            });
-
-            attributes.push({
-                noteId,
-                type: 'label',
-                name: 'fileSize',
-                value: content.byteLength
-            });
-        }
-
         if (!firstNote) {
             firstNote = note;
         }
@@ -364,6 +338,22 @@ async function importTar(importContext, fileBuffer, importRootNote) {
                 filePath = getTextFileWithoutExtension(filePath);
             }
         }
+
+        if (!noteMeta && (type === 'file' || type === 'image')) {
+            attributes.push({
+                noteId,
+                type: 'label',
+                name: 'originalFileName',
+                value: path.basename(filePath)
+            });
+
+            attributes.push({
+                noteId,
+                type: 'label',
+                name: 'fileSize',
+                value: content.byteLength
+            });
+        }
     }
 
     /** @return {string} path without leading or trailing slash and backslashes converted to forward ones*/
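
This is the same originalFileName/fileSize block that the earlier hunk removed; it is re-added a few lines further down in the same routine, so files and images without a metadata entry still get both labels. For illustration only (the file name and size are invented), such an entry ends up with:

    // e.g. an imported "photo.jpg" of 102400 bytes that has no metadata entry
    attributes.push({noteId, type: 'label', name: 'originalFileName', value: 'photo.jpg'});
    attributes.push({noteId, type: 'label', name: 'fileSize', value: 102400});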
@@ -426,7 +416,9 @@ async function importTar(importContext, fileBuffer, importRootNote) {
         const noteId = createdPaths[path];
 
         createdNoteIds[noteId] = true;
+    }
 
+    for (const noteId in createdNoteIds) { // now the noteIds are unique
         await noteService.scanForLinks(noteId);
 
         importContext.increaseProgressCount();
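
The reasoning behind this hunk, sketched on its own rather than as the surrounding importTar code verbatim: several entries in createdPaths can map to the same noteId, so calling scanForLinks inside the per-path loop could process a note more than once. Collecting the ids as object keys first makes them unique:

    // runs inside the async import routine, after all notes have been created
    const createdNoteIds = {};

    for (const path in createdPaths) {
        // duplicate noteIds collapse into a single key here
        createdNoteIds[createdPaths[path]] = true;
    }

    for (const noteId in createdNoteIds) { // now the noteIds are unique
        await noteService.scanForLinks(noteId);

        importContext.increaseProgressCount();
    }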
@@ -248,7 +248,7 @@ function findRelationMapLinks(content, foundLinks) {
         foundLinks.push({
             name: 'relation-map-link',
             value: note.noteId
-        })
+        });
     }
 }
 
@@ -64,12 +64,14 @@ async function fillSyncRows(entityName, entityKey, condition = '') {
         const entityIds = await sql.getColumn(`SELECT ${entityKey} FROM ${entityName}`
             + (condition ? ` WHERE ${condition}` : ''));
 
+        let createdCount = 0;
+
         for (const entityId of entityIds) {
             const existingRows = await sql.getValue("SELECT COUNT(id) FROM sync WHERE entityName = ? AND entityId = ?", [entityName, entityId]);
 
             // we don't want to replace existing entities (which would effectively cause full resync)
             if (existingRows === 0) {
-                log.info(`Creating missing sync record for ${entityName} ${entityId}`);
+                createdCount++;
 
                 await sql.insert("sync", {
                     entityName: entityName,
@@ -79,6 +81,10 @@ async function fillSyncRows(entityName, entityKey, condition = '') {
                 });
             }
         }
+
+        if (createdCount > 0) {
+            log.info(`Created ${createdCount} missing sync records for ${entityName}.`);
+        }
     }
     catch (e) {
         // this is to fix migration from 0.30 to 0.32, can be removed later
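
The two fillSyncRows hunks belong together: rather than logging one line per missing sync row, the rows are now counted and a single summary is emitted at the end. A condensed sketch of the resulting flow, with the surrounding try/catch and the full column list of the insert elided:

    let createdCount = 0;

    for (const entityId of entityIds) {
        const existingRows = await sql.getValue(
            "SELECT COUNT(id) FROM sync WHERE entityName = ? AND entityId = ?",
            [entityName, entityId]);

        // only create sync rows that are actually missing
        if (existingRows === 0) {
            createdCount++;

            await sql.insert("sync", {entityName: entityName /* remaining columns elided */});
        }
    }

    if (createdCount > 0) {
        log.info(`Created ${createdCount} missing sync records for ${entityName}.`);
    }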