separate sync for pull (implemented) and push (not yet)

azivner 2017-10-26 20:31:31 -04:00
parent f6033705a7
commit 266727a21b
9 changed files with 116 additions and 73 deletions

View File

@@ -5,7 +5,6 @@ process.on('unhandledRejection', error => {
     console.log(error);
     // but also try to log it into file
-    // we're using .info() instead of .error() because simple-node-logger emits weird error for error()
     require('../services/log').info(error);
 });

View File

@@ -0,0 +1,5 @@
+CREATE UNIQUE INDEX `IDX_notes_history_note_from_to` ON `notes_history` (
+    `note_id`,
+    `date_modified_from`,
+    `date_modified_to`
+);

View File

@@ -0,0 +1,3 @@
+UPDATE options SET opt_name = 'last_synced_pull' WHERE opt_name = 'last_synced';
+INSERT INTO options (opt_name, opt_value) VALUES ('last_synced_push', 0);
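The options migration above repurposes the old single cursor as the pull cursor and seeds a separate push cursor at 0. For orientation, a minimal sketch of how the two cursors are read back independently, using the sql.getOption helper that appears in this commit (the wrapper function and require path are illustrative only):

// Minimal sketch, not part of the commit: sql.getOption is taken from the diffs below,
// the wrapper function and require path are illustrative.
const sql = require('./sql');

async function getSyncCursors() {
    // pull and push now advance independently of each other
    const lastSyncedPull = parseInt(await sql.getOption('last_synced_pull'));
    const lastSyncedPush = parseInt(await sql.getOption('last_synced_push'));

    return { lastSyncedPull, lastSyncedPush };
}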

View File

@@ -6,14 +6,14 @@ const auth = require('../../services/auth');
 const sql = require('../../services/sql');
 const migration = require('../../services/migration');
-router.get('', auth.checkApiAuth, async (req, res, next) => {
+router.get('', auth.checkApiAuthWithoutMigration, async (req, res, next) => {
     res.send({
         'db_version': parseInt(await sql.getOption('db_version')),
         'app_db_version': migration.APP_DB_VERSION
     });
 });
-router.post('', auth.checkApiAuth, async (req, res, next) => {
+router.post('', auth.checkApiAuthWithoutMigration, async (req, res, next) => {
     const migrations = await migration.migrate();
     res.send({

View File

@@ -4,7 +4,7 @@ const express = require('express');
 const router = express.Router();
 const auth = require('../services/auth');
-router.get('', auth.checkAuth, (req, res, next) => {
+router.get('', auth.checkAuthWithoutMigration, (req, res, next) => {
     res.render('migration', {});
 });

View File

@@ -15,6 +15,15 @@ async function checkAuth(req, res, next) {
     }
 }
+async function checkAuthWithoutMigration(req, res, next) {
+    if (!req.session.loggedIn) {
+        res.redirect("login");
+    }
+    else {
+        next();
+    }
+}
 async function checkApiAuth(req, res, next) {
     if (!req.session.loggedIn && req.header("auth") !== "sync") {
         res.sendStatus(401);
@@ -28,7 +37,18 @@ async function checkApiAuth(req, res, next) {
     }
 }
+async function checkApiAuthWithoutMigration(req, res, next) {
+    if (!req.session.loggedIn && req.header("auth") !== "sync") {
+        res.sendStatus(401);
+    }
+    else {
+        next();
+    }
+}
 module.exports = {
     checkAuth,
-    checkApiAuth
+    checkAuthWithoutMigration,
+    checkApiAuth,
+    checkApiAuthWithoutMigration
 };
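The new *WithoutMigration variants check only the login session (or the sync header, for the API variant) and skip whatever migration-state handling checkAuth/checkApiAuth additionally perform, which is presumably what keeps the migration page and its API reachable while the database schema is behind. A minimal, illustrative wiring sketch; the route paths and session setup are assumptions, only the middleware names come from this commit:

// Illustrative wiring only; route paths and session configuration are assumptions,
// the middleware names come from the services/auth changes in this commit.
const express = require('express');
const session = require('express-session');
const auth = require('./services/auth');

const app = express();
app.use(session({ secret: 'dev-only', resave: false, saveUninitialized: false }));

// ordinary pages keep the stricter middleware
app.get('/', auth.checkAuth, (req, res) => res.send('app'));

// the migration page must stay reachable even when the schema is outdated,
// so it uses the variant that only verifies the login session
app.get('/migration', auth.checkAuthWithoutMigration, (req, res) => res.send('migration'));

app.listen(3000);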

View File

@@ -19,7 +19,8 @@ function info(message) {
 }
 function error(message) {
-    logger.error(message);
+    // we're using .info() instead of .error() because simple-node-logger emits weird error for error()
+    logger.info(message);
 }
 const requestBlacklist = [ "/api/audit", "/libraries", "/javascripts", "/images", "/stylesheets" ];

View File

@@ -3,7 +3,7 @@ const sql = require('./sql');
 const fs = require('fs-extra');
 const log = require('./log');
-const APP_DB_VERSION = 11;
+const APP_DB_VERSION = 13;
 const MIGRATIONS_DIR = "./migrations";
 async function migrate() {
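APP_DB_VERSION jumps from 11 to 13, matching the two new migration SQL files added above. The up-to-date check that gates sync (migration.isDbUpToDate(), called in the sync service below) is not part of this diff; judging by the db_version option and APP_DB_VERSION exposed by the API route above, it plausibly amounts to something like the sketch below:

// Illustrative sketch only - the real isDbUpToDate() implementation is outside this diff.
// It is inferred from the 'db_version' option and APP_DB_VERSION used elsewhere in the commit.
async function isDbUpToDate() {
    const dbVersion = parseInt(await sql.getOption('db_version'));

    return dbVersion >= APP_DB_VERSION;
}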

View File

@@ -10,78 +10,92 @@ const SYNC_SERVER = 'http://localhost:3000';
 let syncInProgress = false;
-async function sync() {
-    try {
-        syncInProgress = true;
+async function pullSync() {
+    const lastSynced = parseInt(await sql.getOption('last_synced_pull'));
+    const resp = await rp({
+        uri: SYNC_SERVER + '/api/sync/changed/' + lastSynced,
+        headers: {
+            auth: 'sync'
+        },
+        json: true
+    });
+    try {
+        await sql.beginTransaction();
+        for (const treeItem of resp.tree) {
+            delete treeItem['id'];
+            await sql.insert("notes_tree", treeItem, true);
+            log.info("Syncing notes_tree " + treeItem.note_id);
+        }
+        for (const audit of resp.audit_log) {
+            delete audit['id'];
+            await sql.insert("audit_log", audit, true);
+            log.info("Syncing audit_log for noteId=" + audit.note_id);
+        }
+        for (const noteId of resp.notes) {
+            const note = await rp({
+                uri: SYNC_SERVER + "/api/sync/note/" + noteId + "/" + lastSynced,
+                headers: {
+                    auth: 'sync'
+                },
+                json: true
+            });
+            console.log(noteId);
+            await sql.insert("notes", note.detail, true);
+            await sql.remove("images", noteId);
+            for (const image of note.images) {
+                await sql.insert("images", image);
+            }
+            for (const history of note.history) {
+                delete history['id'];
+                await sql.insert("notes_history", history);
+            }
+        }
+        await sql.setOption('last_synced_pull', syncTimestamp);
+        await sql.commit();
+    }
+    catch (e) {
+        await sql.rollback();
+        throw e;
+    }
+}
+async function pushSync() {
+}
+async function sync() {
+    if (syncInProgress) {
+        return;
+    }
+    syncInProgress = true;
+    try {
         if (!await migration.isDbUpToDate()) {
             return;
         }
-        const lastSynced = parseInt(await sql.getOption('last_synced'));
-        const resp = await rp({
-            uri: SYNC_SERVER + '/api/sync/changed/' + lastSynced,
-            headers: {
-                auth: 'sync'
-            },
-            json: true
-        });
-        try {
-            await sql.beginTransaction();
-            for (const treeItem of resp.tree) {
-                delete treeItem['id'];
-                await sql.insert("notes_tree", treeItem, true);
-                log.info("Syncing notes_tree " + treeItem.note_id);
-            }
-            for (const audit of resp.audit_log) {
-                delete audit['id'];
-                await sql.insert("audit_log", audit, true);
-                log.info("Syncing audit_log for noteId=" + audit.note_id);
-            }
-            for (const noteId of resp.notes) {
-                const note = await rp({
-                    uri: SYNC_SERVER + "/api/sync/note/" + noteId + "/" + lastSynced,
-                    headers: {
-                        auth: 'sync'
-                    },
-                    json: true
-                });
-                console.log(noteId);
-                await sql.insert("notes", note.detail, true);
-                await sql.remove("images", noteId);
-                for (const image of note.images) {
-                    await sql.insert("images", image);
-                }
-                for (const history of note.history) {
-                    delete history['id'];
-                    await sql.insert("notes_history", history);
-                }
-            }
-            await sql.setOption('last_synced', syncTimestamp);
-            await sql.commit();
-        }
-        catch (e) {
-            await sql.rollback();
-            throw e;
-        }
+        await pushSync();
+        await pullSync();
     }
     catch (e) {
         log.error("sync failed: " + e.stack);
@@ -93,4 +107,5 @@ async function sync() {
 setInterval(sync, 60000);
+// kickoff initial sync immediately
 setTimeout(sync, 1000);
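pushSync() is deliberately left as an empty stub here, in line with the commit title. Purely as a hypothetical sketch of where the split is headed, a push side mirroring the pull logic could consume the new last_synced_push cursor as below; the endpoint name, the changed-rows query, and the sql.getResults helper are assumptions, not part of this commit:

// HYPOTHETICAL sketch - push sync is explicitly not implemented in this commit.
// Only the 'last_synced_push' option and the empty pushSync() stub exist at this point;
// the endpoint, payload shape and sql.getResults helper below are assumptions.
async function pushSync() {
    const lastSyncedPush = parseInt(await sql.getOption('last_synced_push'));

    // assumed helper: fetch rows changed locally since the push cursor
    const changed = await sql.getResults(
        "SELECT * FROM audit_log WHERE date_modified > ?", [lastSyncedPush]);

    await rp({
        method: 'PUT',
        uri: SYNC_SERVER + '/api/sync/push',   // hypothetical endpoint
        headers: { auth: 'sync' },
        body: changed,
        json: true
    });

    // syncTimestamp would come from the surrounding module, as in pullSync()
    await sql.setOption('last_synced_push', syncTimestamp);
}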