diff --git a/plugins/reddit.js b/plugins/reddit.js
index 6959dfae4..149077ec7 100644
--- a/plugins/reddit.js
+++ b/plugins/reddit.js
@@ -7,7 +7,8 @@ const log = require('../services/log');
 const utils = require('../services/utils');
 const unescape = require('unescape');
 const attributes = require('../services/attributes');
-const options = require('../services/options');
+const sync_mutex = require('../services/sync_mutex');
+const config = require('../services/config');
 
 const REDDIT_ROOT = 'reddit_root';
 
@@ -106,7 +107,7 @@ async function getDateNoteIdForReddit(dateTimeStr, rootNoteId) {
     return redditDateNoteId;
 }
 
-async function importReddit(accountName, afterId = null) {
+async function importComments(accountName, afterId = null) {
     let rootNoteId = await sql.getFirstValue(`SELECT notes.note_id FROM notes JOIN attributes USING(note_id)
           WHERE attributes.name = '${REDDIT_ROOT}' AND notes.is_deleted = 0`);
 
@@ -173,38 +174,47 @@ async function importReddit(accountName, afterId = null) {
     if (listing.data.after && importedComments > 0) {
         log.info("Reddit: Importing from next page of comments ...");
 
-        importedComments += await importReddit(accountName, listing.data.after);
+        importedComments += await importComments(accountName, listing.data.after);
     }
 
     return importedComments;
 }
 
+let redditAccounts = [];
+
+async function runImport() {
+    // technically mutex shouldn't be necessary but we want to avoid doing potentially expensive import
+    // concurrently with sync
+    await sync_mutex.doExclusively(async () => {
+        let importedComments = 0;
+
+        for (const account of redditAccounts) {
+            log.info("Reddit: Importing account " + account);
+
+            importedComments += await importComments(account);
+        }
+
+        log.info(`Reddit: Imported ${importedComments} comments.`);
+    });
+}
+
 sql.dbReady.then(async () => {
-    const enabledOption = await options.getOptionOrNull("reddit_enabled");
-    const accountsOption = await options.getOptionOrNull("reddit_accounts");
+    console.log(config);
 
-    if (!enabledOption) {
-        await options.createOption("reddit_enabled", "false", true);
-        await options.createOption("reddit_accounts", "[]", true);
+    if (!config['Reddit'] || config['Reddit']['enabled'] !== true) {
         return;
     }
 
-    if (enabledOption.opt_value !== "true") {
-        return;
-    }
+    const redditAccountsStr = config['Reddit']['accounts'];
 
-    if (!accountsOption) {
+    if (!redditAccountsStr) {
         log.info("Reddit: No reddit accounts defined in option 'reddit_accounts'");
     }
 
-    const redditAccounts = JSON.parse(accountsOption.opt_value);
-    let importedComments = 0;
+    redditAccounts = redditAccountsStr.split(",").map(s => s.trim());
 
-    for (const account of redditAccounts) {
-        log.info("Reddit: Importing account " + account);
+    const pollingIntervalInSeconds = config['Reddit']['pollingIntervalInSeconds'] || 3600;
 
-        importedComments += await importReddit(account);
-    }
-
-    log.info(`Reddit: Imported ${importedComments} comments.`);
+    setInterval(runImport, pollingIntervalInSeconds * 1000);
+    setTimeout(runImport, 1000);
 });
diff --git a/services/backup.js b/services/backup.js
index 80269921d..f143d4764 100644
--- a/services/backup.js
+++ b/services/backup.js
@@ -23,9 +23,8 @@ async function regularBackup() {
 
 async function backupNow() {
     // we don't want to backup DB in the middle of sync with potentially inconsistent DB state
-    const releaseMutex = await sync_mutex.acquire();
-    try {
+    await sync_mutex.doExclusively(async () => {
        const now = utils.nowDate();

        const backupFile = dataDir.BACKUP_DIR + "/" + "backup-" + utils.getDateTimeForFile() + ".db";
@@ -37,10 +36,7 @@ async function backupNow() {
         await sql.doInTransaction(async () => {
             await options.setOption('last_backup_date', now);
         });
-    }
-    finally {
-        releaseMutex();
-    }
+    });
 }
 
 async function cleanupOldBackups() {
diff --git a/services/consistency_checks.js b/services/consistency_checks.js
index b49a45afe..9c73a7422 100644
--- a/services/consistency_checks.js
+++ b/services/consistency_checks.js
@@ -217,18 +217,14 @@ async function runAllChecks() {
 async function runChecks() {
     let errorList;
     let elapsedTimeMs;
 
-    const releaseMutex = await sync_mutex.acquire();
-    try {
+    await sync_mutex.doExclusively(async () => {
         const startTime = new Date();
 
         errorList = await runAllChecks();
 
         elapsedTimeMs = new Date().getTime() - startTime.getTime();
-    }
-    finally {
-        releaseMutex();
-    }
+    });
 
     if (errorList.length > 0) {
         log.info(`Consistency checks failed (took ${elapsedTimeMs}ms) with these errors: ` + JSON.stringify(errorList));
diff --git a/services/sync.js b/services/sync.js
index 931bde8c4..6504f13e2 100644
--- a/services/sync.js
+++ b/services/sync.js
@@ -20,25 +20,25 @@ let proxyToggle = true;
 let syncServerCertificate = null;
 
 async function sync() {
-    const releaseMutex = await sync_mutex.acquire();
-    try {
-        if (!await sql.isDbUpToDate()) {
-            return {
-                success: false,
-                message: "DB not up to date"
-            };
-        }
+    await sync_mutex.doExclusively(async () => {
+        if (!await sql.isDbUpToDate()) {
+            return {
+                success: false,
+                message: "DB not up to date"
+            };
+        }
 
-        const syncContext = await login();
+        const syncContext = await login();
 
-        await pushSync(syncContext);
+        await pushSync(syncContext);
 
-        await pullSync(syncContext);
+        await pullSync(syncContext);
 
-        await pushSync(syncContext);
+        await pushSync(syncContext);
 
-        await checkContentHash(syncContext);
+        await checkContentHash(syncContext);
+    });
 
         return {
             success: true
@@ -64,9 +64,6 @@ async function sync() {
             }
         }
     }
-    finally {
-        releaseMutex();
-    }
 }
 
 async function login() {
diff --git a/services/sync_mutex.js b/services/sync_mutex.js
index 0fe21bd98..b68937acc 100644
--- a/services/sync_mutex.js
+++ b/services/sync_mutex.js
@@ -4,5 +4,20 @@
  */
 
 const Mutex = require('async-mutex').Mutex;
+const instance = new Mutex();
 
-module.exports = new Mutex();
\ No newline at end of file
+async function doExclusively(func) {
+    const releaseMutex = await instance.acquire();
+
+    try {
+        await func();
+    }
+    finally {
+        releaseMutex();
+    }
+
+}
+
+module.exports = {
+    doExclusively
+};
\ No newline at end of file