diff --git a/src/app.js b/src/app.js
index 80568782e..a9024c8b2 100644
--- a/src/app.js
+++ b/src/app.js
@@ -23,6 +23,7 @@ app.use(helmet({
     contentSecurityPolicy: false
 }));
 
+app.use(bodyParser.text({limit: '500mb'}));
 app.use(bodyParser.json({limit: '500mb'}));
 app.use(bodyParser.urlencoded({extended: false}));
 app.use(cookieParser());
diff --git a/src/services/content_hash.js b/src/services/content_hash.js
index d29847ea0..b4b0ffe4e 100644
--- a/src/services/content_hash.js
+++ b/src/services/content_hash.js
@@ -34,7 +34,7 @@ function getSectorHashes(tableName, primaryKeyName, whereBranch) {
 function getEntityHashes() {
     const startTime = new Date();
 
-    const hashRows = sql.getRows(`SELECT entityName, entityId, hash FROM entity_changes`);
+    const hashRows = sql.getRows(`SELECT entityName, entityId, hash FROM entity_changes WHERE isSynced = 1`);
 
     // sorting is faster in memory
     // sorting by entityId is enough, hashes will be segmented by entityName later on anyway
diff --git a/src/services/entity_changes.js b/src/services/entity_changes.js
index b19787911..72be32458 100644
--- a/src/services/entity_changes.js
+++ b/src/services/entity_changes.js
@@ -1,5 +1,4 @@
 const sql = require('./sql');
-const repository = require('./repository');
 const sourceIdService = require('./source_id');
 const dateUtils = require('./date_utils');
 const log = require('./log');
@@ -39,6 +38,7 @@ function moveEntityChangeToTop(entityName, entityId) {
 
 function addEntityChangesForSector(entityName, entityPrimaryKey, sector) {
     const startTime = Date.now();
+    const repository = require('./repository');
 
     sql.transactional(() => {
         const entityIds = sql.getColumn(`SELECT ${entityPrimaryKey} FROM ${entityName} WHERE SUBSTR(${entityPrimaryKey}, 1, 1) = ?`, [sector]);
diff --git a/src/services/sync.js b/src/services/sync.js
index 4701633e2..817b29a29 100644
--- a/src/services/sync.js
+++ b/src/services/sync.js
@@ -253,6 +253,8 @@ async function checkContentHash(syncContext) {
     return failedChecks.length > 0;
 }
 
+const PAGE_SIZE = 1000000;
+
 async function syncRequest(syncContext, method, requestPath, body) {
     body = body ? JSON.stringify(body) : '';
 
@@ -261,7 +263,7 @@ async function syncRequest(syncContext, method, requestPath, body) {
     let response;
 
     const requestId = utils.randomString(10);
-    const pageCount = Math.min(1, Math.ceil(body.length / 1000000));
+    const pageCount = Math.max(1, Math.ceil(body.length / PAGE_SIZE));
 
     for (let pageIndex = 0; pageIndex < pageCount; pageIndex++) {
         const opts = {
@@ -274,13 +276,11 @@
                 pageCount,
                 requestId
             },
-            body,
+            body: body.substr(pageIndex * PAGE_SIZE, Math.min(PAGE_SIZE, body.length - pageIndex * PAGE_SIZE)),
             proxy: proxyToggle ? syncOptions.getSyncProxy() : null
         };
 
         response = await utils.timeLimit(request.exec(opts), timeout);
-
-        console.log("response", response);
     }
 
     return response;
diff --git a/src/services/sync_update.js b/src/services/sync_update.js
index d10e7b32b..6a33e37de 100644
--- a/src/services/sync_update.js
+++ b/src/services/sync_update.js
@@ -23,35 +23,36 @@ function updateEntity(entityChange, entity, sourceId) {
     }
 }
 
-function updateNormalEntity(entityChange, entity, sourceId) {
-    const {utcDateChanged, hash, isErased} = sql.getRow(`
+function updateNormalEntity(remoteEntityChange, entity, sourceId) {
+    const localEntityChange = sql.getRow(`
         SELECT utcDateChanged, hash, isErased
         FROM entity_changes
-        WHERE entityName = ? AND entityId = ?`, [entityChange.entityName, entityChange.entityId]);
+        WHERE entityName = ? AND entityId = ?`, [remoteEntityChange.entityName, remoteEntityChange.entityId]);
 
-    if (!isErased && entityChange.isErased) {
+    if (localEntityChange && !localEntityChange.isErased && remoteEntityChange.isErased) {
         sql.transactional(() => {
             const primaryKey = entityConstructor.getEntityFromEntityName(entityName).primaryKeyName;
 
-            sql.execute(`DELETE FROM ${entityChange.entityName} WHERE ${primaryKey} = ?`, entityChange.entityId);
+            sql.execute(`DELETE FROM ${remoteEntityChange.entityName} WHERE ${primaryKey} = ?`, remoteEntityChange.entityId);
 
-            entityChangesService.addEntityChange(entityChange, sourceId);
+            entityChangesService.addEntityChange(remoteEntityChange, sourceId);
         });
 
         return true;
     }
 
-    if (utcDateChanged < entityChange.utcDateChanged
-        || hash !== entityChange.hash // sync error, we should still update
+    if (!localEntityChange
+        || localEntityChange.utcDateChanged < remoteEntityChange.utcDateChanged
+        || localEntityChange.hash !== remoteEntityChange.hash // sync error, we should still update
     ) {
-        if (['note_contents', 'note_revision_contents'].includes(entityChange.entityName)) {
+        if (['note_contents', 'note_revision_contents'].includes(remoteEntityChange.entityName)) {
             entity.content = handleContent(entity.content);
         }
 
         sql.transactional(() => {
-            sql.replace(entityChange.entityName, entity);
+            sql.replace(remoteEntityChange.entityName, entity);
 
-            entityChangesService.addEntityChange(entityChange, sourceId);
+            entityChangesService.addEntityChange(remoteEntityChange, sourceId);
         });
 
         return true;
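For reference, a minimal sketch (not part of the diff) of the request paging arithmetic introduced in src/services/sync.js. splitPages() is a hypothetical helper used only to illustrate how syncRequest() now slices the serialized body into PAGE_SIZE chunks, and why Math.max(1, ...) is the fix: the previous Math.min(1, ...) capped every request at a single page, truncating any body larger than PAGE_SIZE. The pageIndex/pageCount/requestId headers suggest the receiving side reassembles the pages, but that code is not part of this diff.

const PAGE_SIZE = 1000000; // 1 MB per page, mirroring src/services/sync.js

// Hypothetical helper, for illustration only.
function splitPages(body) {
    // At least one (possibly empty) page is always produced, even for body === ''.
    const pageCount = Math.max(1, Math.ceil(body.length / PAGE_SIZE));
    const pages = [];

    for (let pageIndex = 0; pageIndex < pageCount; pageIndex++) {
        // substr clamps at the end of the string, so the last page may be shorter.
        pages.push(body.substr(pageIndex * PAGE_SIZE, PAGE_SIZE));
    }

    return pages;
}

// A 2.5 MB body yields three pages: [ 1000000, 1000000, 500000 ]
console.log(splitPages('x'.repeat(2500000)).map(p => p.length));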