fix incorrect import of relations from tar

zadam 2019-10-02 23:22:58 +02:00
parent dec2c218f7
commit 8d14a0d687
4 changed files with 36 additions and 38 deletions

Binary file not shown.


@@ -1,6 +1,5 @@
"use strict";
const Attribute = require('../../entities/attribute');
const utils = require('../../services/utils');
const log = require('../../services/log');
@@ -114,28 +113,18 @@ async function importTar(importContext, fileBuffer, importRootNote) {
}
function getNoteId(noteMeta, filePath) {
let noteId;
const filePathNoExt = getTextFileWithoutExtension(filePath);
if (noteMeta) {
if (filePathNoExt in createdPaths) {
noteId = createdPaths[filePathNoExt];
noteIdMap[noteMeta.noteId] = noteId;
}
else {
noteId = getNewNoteId(noteMeta.noteId);
}
}
else {
if (filePathNoExt in createdPaths) {
noteId = createdPaths[filePathNoExt];
}
else {
noteId = utils.newEntityId();
}
console.log(`Searching for noteId of filePath ${filePath} with meta: ${!!noteMeta}`);
if (filePathNoExt in createdPaths) {
console.log("Found existing path", filePathNoExt, createdPaths[filePathNoExt]);
return createdPaths[filePathNoExt];
}
const noteId = noteMeta ? getNewNoteId(noteMeta.noteId) : utils.newEntityId();
createdPaths[filePathNoExt] = noteId;
return noteId;
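
Read as a whole (the hunk mixes removed and added lines without markers), the rewritten getNoteId appears to reduce to roughly the following sketch; the console.log debug lines shown above are left out, and the exact split between removed and added lines is an assumption:

// Sketch only, not verbatim: every resolved path is cached in createdPaths,
// so later lookups of the same path return the same noteId.
function getNoteId(noteMeta, filePath) {
    const filePathNoExt = getTextFileWithoutExtension(filePath);

    // reuse the noteId already generated for this path, if any
    if (filePathNoExt in createdPaths) {
        return createdPaths[filePathNoExt];
    }

    // with meta info, the original noteId is remapped via getNewNoteId();
    // without it, a fresh entity id is generated
    const noteId = noteMeta ? getNewNoteId(noteMeta.noteId) : utils.newEntityId();

    createdPaths[filePathNoExt] = noteId;

    return noteId;
}
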
@@ -234,7 +223,8 @@ async function importTar(importContext, fileBuffer, importRootNote) {
absUrl += (absUrl.length > 0 ? '/' : '') + url;
const targetNoteId = getNoteId(null, absUrl);
const {noteMeta} = getMeta(absUrl);
const targetNoteId = getNoteId(noteMeta, absUrl);
return targetNoteId;
}
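
The added lines here are the core of the relation fix: the link target is now resolved through the target's own metadata entry instead of getNoteId(null, ...), so relation targets go through the same noteId remapping as the notes themselves. A hypothetical usage, with an invented archive path:

// Illustration only; 'notes/Target.html' is an invented example path.
// getMeta() looks up the archive's metadata entry for that path (if present),
// and getNoteId() then reuses or remaps the noteId consistently instead of
// minting an unrelated id for the relation target.
const absUrl = 'notes/Target.html';
const {noteMeta} = getMeta(absUrl);
const targetNoteId = getNoteId(noteMeta, absUrl);
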
@@ -340,22 +330,6 @@ async function importTar(importContext, fileBuffer, importRootNote) {
await saveAttributes(note, noteMeta);
if (!noteMeta && (type === 'file' || type === 'image')) {
attributes.push({
noteId,
type: 'label',
name: 'originalFileName',
value: path.basename(filePath)
});
attributes.push({
noteId,
type: 'label',
name: 'fileSize',
value: content.byteLength
});
}
if (!firstNote) {
firstNote = note;
}
@@ -364,6 +338,22 @@ async function importTar(importContext, fileBuffer, importRootNote) {
filePath = getTextFileWithoutExtension(filePath);
}
}
if (!noteMeta && (type === 'file' || type === 'image')) {
attributes.push({
noteId,
type: 'label',
name: 'originalFileName',
value: path.basename(filePath)
});
attributes.push({
noteId,
type: 'label',
name: 'fileSize',
value: content.byteLength
});
}
}
/** @return {string} path without leading or trailing slash and backslashes converted to forward ones*/
@@ -426,7 +416,9 @@ async function importTar(importContext, fileBuffer, importRootNote) {
const noteId = createdPaths[path];
createdNoteIds[noteId] = true;
}
for (const noteId in createdNoteIds) { // now the noteIds are unique
await noteService.scanForLinks(noteId);
importContext.increaseProgressCount();
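
The last import hunk deduplicates before scanning: since several archive paths can map to the same noteId (see getNoteId above), the ids are first collected as object keys so each note is link-scanned only once. Assembled from the visible lines, the passage reads roughly as follows; the outer loop over createdPaths and the createdNoteIds initialization sit outside the shown context and are assumptions:

// Sketch assembled from the visible lines of this hunk.
const createdNoteIds = {};

for (const path in createdPaths) {
    const noteId = createdPaths[path];
    createdNoteIds[noteId] = true;
}

for (const noteId in createdNoteIds) { // now the noteIds are unique
    await noteService.scanForLinks(noteId);

    importContext.increaseProgressCount();
}
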


@@ -248,7 +248,7 @@ function findRelationMapLinks(content, foundLinks) {
foundLinks.push({
name: 'relation-map-link',
value: note.noteId
})
});
}
}


@@ -64,12 +64,14 @@ async function fillSyncRows(entityName, entityKey, condition = '') {
const entityIds = await sql.getColumn(`SELECT ${entityKey} FROM ${entityName}`
+ (condition ? ` WHERE ${condition}` : ''));
let createdCount = 0;
for (const entityId of entityIds) {
const existingRows = await sql.getValue("SELECT COUNT(id) FROM sync WHERE entityName = ? AND entityId = ?", [entityName, entityId]);
// we don't want to replace existing entities (which would effectively cause full resync)
if (existingRows === 0) {
log.info(`Creating missing sync record for ${entityName} ${entityId}`);
createdCount++;
await sql.insert("sync", {
entityName: entityName,
@@ -79,6 +81,10 @@ async function fillSyncRows(entityName, entityKey, condition = '') {
});
}
}
if (createdCount > 0) {
log.info(`Created ${createdCount} missing sync records for ${entityName}.`);
}
}
catch (e) {
// this is to fix migration from 0.30 to 0.32, can be removed later