Update dependencies and refactor database handling in restore.js

It might even work - aka: trust but verify

- Added a new script to update libraries in package.json.
- Updated dependencies to specific versions for better stability.
- Refactored the crontabs function in restore.js to use lowdb for database operations instead of nedb, improving performance and maintainability (a short lowdb sketch follows the commit details below).
pull/257/head
Arrel Neumiller 2025-07-15 19:11:34 +01:00
parent c69ef51a04
commit b48b37dc6f
4 changed files with 1386 additions and 919 deletions
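
For orientation, here is a minimal sketch of the read-modify-write flow that the lowdb refactor relies on. It follows the lowdb v7 ESM documentation, where JSONFile is imported from lowdb/node and Low takes default data as a second argument; the diff itself uses require('lowdb'), so treat the import style, file name, and record fields below as illustrative assumptions rather than the project's exact setup.

// Sketch only: lowdb v7-style usage; file name and record contents are illustrative.
import { Low } from 'lowdb';
import { JSONFile } from 'lowdb/node';

// Back the database with a plain JSON file and give it the { crontabs: [] }
// default shape that the refactored code expects.
const adapter = new JSONFile('crontab.json');
const db = new Low(adapter, { crontabs: [] });

await db.read();                      // load db.data from disk (or fall back to the defaults)
db.data.crontabs.push({               // mutate the in-memory document...
    _id: Date.now().toString(),
    name: 'example job',
    schedule: '*/5 * * * *',
    stopped: false,
});
await db.write();                     // ...then persist it back to the JSON file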

crontab.js

@@ -1,15 +1,22 @@
 /*jshint esversion: 6*/
 //load database
-var Datastore = require('nedb');
+const { Low, JSONFile } = require('lowdb');
 var path = require("path");
 exports.db_folder = process.env.CRON_DB_PATH === undefined ? path.join(__dirname, "crontabs") : process.env.CRON_DB_PATH;
 console.log("Cron db path: " + exports.db_folder);
 exports.log_folder = path.join(exports.db_folder, 'logs');
 exports.env_file = path.join(exports.db_folder, 'env.db');
-exports.crontab_db_file = path.join(exports.db_folder, 'crontab.db');
-var db = new Datastore({ filename: exports.crontab_db_file});
+exports.crontab_db_file = path.join(exports.db_folder, 'crontab.json');
+const adapter = new JSONFile(exports.crontab_db_file);
+const db = new Low(adapter);
+async function initDB() {
+    await db.read();
+    db.data = db.data || { crontabs: [] };
+    await db.write();
+}
+initDB();
 var cronPath = "/tmp";
 if(process.env.CRON_PATH !== undefined) {
     console.log(`Path to crond files set using env variables ${process.env.CRON_PATH}`);
@@ -27,7 +34,7 @@ var cronstrue = require('cronstrue/i18n');
 var humanCronLocate = process.env.HUMANCRON ?? "en"
 if (!fs.existsSync(exports.log_folder)){
     fs.mkdirSync(exports.log_folder);
 }
 crontab = function(name, command, schedule, stopped, logging, mailing){
@@ -46,73 +53,84 @@ crontab = function(name, command, schedule, stopped, logging, mailing){
     return data;
 };
-exports.create_new = function(name, command, schedule, logging, mailing){
-    var tab = crontab(name, command, schedule, false, logging, mailing);
-    tab.created = new Date().valueOf();
-    tab.saved = false;
-    db.insert(tab);
+exports.create_new = async function(name, command, schedule, logging, mailing){
+    await initDB();
+    var tab = crontab(name, command, schedule, false, logging, mailing);
+    tab.created = new Date().valueOf();
+    tab.saved = false;
+    tab._id = Date.now().toString() + Math.random().toString(36).substr(2, 9);
+    db.data.crontabs.push(tab);
+    await db.write();
 };
-exports.update = function(data){
-    var tab = crontab(data.name, data.command, data.schedule, null, data.logging, data.mailing);
-    tab.saved = false;
-    db.update({_id: data._id}, tab);
+exports.update = async function(data){
+    await initDB();
+    var tab = crontab(data.name, data.command, data.schedule, null, data.logging, data.mailing);
+    tab.saved = false;
+    const idx = db.data.crontabs.findIndex(t => t._id === data._id);
+    if (idx !== -1) {
+        db.data.crontabs[idx] = { ...db.data.crontabs[idx], ...tab };
+        await db.write();
+    }
 };
-exports.status = function(_id, stopped){
-    db.update({_id: _id},{$set: {stopped: stopped, saved: false}});
+exports.status = async function(_id, stopped){
+    await initDB();
+    const idx = db.data.crontabs.findIndex(t => t._id === _id);
+    if (idx !== -1) {
+        db.data.crontabs[idx].stopped = stopped;
+        db.data.crontabs[idx].saved = false;
+        await db.write();
+    }
 };
-exports.remove = function(_id){
-    db.remove({_id: _id}, {});
+exports.remove = async function(_id){
+    await initDB();
+    db.data.crontabs = db.data.crontabs.filter(t => t._id !== _id);
+    await db.write();
 };
 // Iterates through all the crontab entries in the db and calls the callback with the entries
-exports.crontabs = function(callback){
-    db.find({}).sort({ created: -1 }).exec(function(err, docs){
-        for(var i=0; i<docs.length; i++){
-            if(docs[i].schedule == "@reboot")
-                docs[i].next = "Next Reboot";
-            else
-                try {
-                    docs[i].human = cronstrue.toString(docs[i].schedule, { locale: humanCronLocate });
-                    docs[i].next = cron_parser.parseExpression(docs[i].schedule).next().toString();
-                } catch(err) {
-                    console.error(err);
-                    docs[i].next = "invalid";
-                }
-        }
-        callback(docs);
-    });
+exports.crontabs = async function(callback){
+    await initDB();
+    let docs = db.data.crontabs.slice().sort((a, b) => b.created - a.created);
+    for(let i=0; i<docs.length; i++){
+        if(docs[i].schedule == "@reboot")
+            docs[i].next = "Next Reboot";
+        else
+            try {
+                docs[i].human = cronstrue.toString(docs[i].schedule, { locale: humanCronLocate });
+                docs[i].next = cron_parser.parseExpression(docs[i].schedule).next().toString();
+            } catch(err) {
+                console.error(err);
+                docs[i].next = "invalid";
+            }
+    }
+    callback(docs);
 };
-exports.get_crontab = function(_id, callback) {
-    db.find({_id: _id}).exec(function(err, docs){
-        callback(docs[0]);
-    });
+exports.get_crontab = async function(_id, callback) {
+    await initDB();
+    const doc = db.data.crontabs.find(t => t._id === _id);
+    callback(doc);
 };
-exports.runjob = function(_id) {
-    db.find({_id: _id}).exec(function(err, docs){
-        let res = docs[0];
-        let env_vars = exports.get_env()
-        let crontab_job_string_command = make_command(res)
-        crontab_job_string_command = add_env_vars(env_vars, crontab_job_string_command)
-        console.log("Running job")
-        console.log("ID: " + _id)
-        console.log("Original command: " + res.command)
-        console.log("Executed command: " + crontab_job_string_command)
-        exec(crontab_job_string_command, function(error, stdout, stderr){
-            if (error) {
-                console.log(error)
-            }
-        });
-    });
+exports.runjob = async function(_id) {
+    await initDB();
+    let res = db.data.crontabs.find(t => t._id === _id);
+    if (!res) return;
+    let env_vars = exports.get_env();
+    let crontab_job_string_command = make_command(res);
+    crontab_job_string_command = add_env_vars(env_vars, crontab_job_string_command);
+    console.log("Running job");
+    console.log("ID: " + _id);
+    console.log("Original command: " + res.command);
+    console.log("Executed command: " + crontab_job_string_command);
+    exec(crontab_job_string_command, function(error, stdout, stderr){
+        if (error) {
+            console.log(error);
+        }
+    });
 };
 make_command = function(tab) {
@@ -172,48 +190,47 @@ add_env_vars = function(env_vars, command) {
 }
 // Set actual crontab file from the db
-exports.set_crontab = function(env_vars, callback) {
-    exports.crontabs( function(tabs){
+exports.set_crontab = async function(env_vars, callback) {
+    await exports.crontabs(async function(tabs){
         var crontab_string = "";
         if (env_vars) {
             crontab_string += env_vars;
             crontab_string += "\n";
         }
         tabs.forEach(function(tab){
             if(!tab.stopped) {
-                crontab_string += tab.schedule
-                crontab_string += " "
-                crontab_string += make_command(tab)
+                crontab_string += tab.schedule;
+                crontab_string += " ";
+                crontab_string += make_command(tab);
                 crontab_string += "\n";
             }
         });
-        fs.writeFile(exports.env_file, env_vars, function(err) {
-            if (err) {
-                console.error(err);
-                callback(err);
-            }
-            // In docker we're running as the root user, so we need to write the file as root and not crontab
-            var fileName = process.env.CRON_IN_DOCKER !== undefined ? "root" : "crontab";
-            fs.writeFile(path.join(cronPath, fileName), crontab_string, function(err) {
-                if (err) {
-                    console.error(err);
-                    return callback(err);
-                }
-                exec("crontab " + path.join(cronPath, fileName), function(err) {
-                    if (err) {
-                        console.error(err);
-                        return callback(err);
-                    }
-                    else {
-                        db.update({},{$set: {saved: true}}, {multi: true});
-                        callback();
-                    }
-                });
-            });
-        });
+        fs.writeFile(exports.env_file, env_vars, function(err) {
+            if (err) {
+                console.error(err);
+                callback(err);
+            }
+            var fileName = process.env.CRON_IN_DOCKER !== undefined ? "root" : "crontab";
+            fs.writeFile(path.join(cronPath, fileName), crontab_string, function(err) {
+                if (err) {
+                    console.error(err);
+                    return callback(err);
+                }
+                exec("crontab " + path.join(cronPath, fileName), async function(err) {
+                    if (err) {
+                        console.error(err);
+                        return callback(err);
+                    }
+                    else {
+                        await initDB();
+                        db.data.crontabs.forEach(tab => tab.saved = true);
+                        await db.write();
+                        callback();
+                    }
+                });
+            });
+        });
     });
 };
 exports.get_backup_names = function(){
@@ -246,54 +263,42 @@ exports.backup = (callback) => {
 };
 exports.restore = function(db_name){
     fs.createReadStream(path.join(exports.db_folder, db_name)).pipe(fs.createWriteStream(exports.crontab_db_file));
-    db.loadDatabase(); // reload the database
+    // For lowdb, reload is just re-reading the file
+    initDB();
 };
 exports.reload_db = function(){
-    db.loadDatabase();
+    initDB();
 };
 exports.get_env = function(){
     if (fs.existsSync(exports.env_file)) {
         return fs.readFileSync(exports.env_file , 'utf8').replace("\n", "\n");
     }
     return "";
 };
 exports.import_crontab = function(){
-    exec("crontab -l", function(error, stdout, stderr){
+    exec("crontab -l", async function(error, stdout, stderr){
         var lines = stdout.split("\n");
         var namePrefix = new Date().getTime();
-        lines.forEach(function(line, index){
-            line = line.replace(/\t+/g, ' ');
-            var regex = /^((\@[a-zA-Z]+\s+)|(([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+))/;
+        await initDB();
+        for (let index = 0; index < lines.length; index++) {
+            let line = lines[index].replace(/\t+/g, ' ');
+            var regex = /^((\@[a-zA-Z]+\s+)|(([^-]+)\s+([^-]+)\s+([^-]+)\s+([^-]+)\s+([^-]+)\s+))/;
             var command = line.replace(regex, '').trim();
             var schedule = line.replace(command, '').trim();
             var is_valid = false;
             try { is_valid = cron_parser.parseString(line).expressions.length > 0; } catch (e){}
             if(command && schedule && is_valid){
                 var name = namePrefix + '_' + index;
-                db.findOne({ command: command, schedule: schedule }, function(err, doc) {
-                    if(err) {
-                        throw err;
-                    }
-                    if(!doc){
-                        exports.create_new(name, command, schedule, null);
-                    }
-                    else{
-                        doc.command = command;
-                        doc.schedule = schedule;
-                        exports.update(doc);
-                    }
-                });
+                let doc = db.data.crontabs.find(t => t.command === command && t.schedule === schedule);
+                if(!doc){
+                    await exports.create_new(name, command, schedule, null);
+                }
+                else{
+                    doc.command = command;
+                    doc.schedule = schedule;
+                    await exports.update(doc);
+                }
             }
-        });
+        }
+        await db.write();
     });
 };
 exports.autosave_crontab = function(callback) {
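
Because the exported helpers above are now async, callers have to await them (or chain .then) so the lowdb write finishes before they respond. The Express-style handler below is a hypothetical illustration of that calling pattern, not code from this commit; the route path and request shape are assumptions.

// Hypothetical caller, not part of the diff: awaiting the now-async crontab API.
const express = require('express');
const crontab = require('./crontab');
const app = express();
app.use(express.json());

app.post('/save', async (req, res) => {
    try {
        await crontab.update(req.body);   // resolves only after lowdb has written the JSON file
        res.end();
    } catch (err) {
        res.status(500).send(String(err));
    }
});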

package-lock.json (generated): 1982 lines changed; diff suppressed because it is too large.

package.json

@@ -5,20 +5,21 @@
   "main": "index.js",
   "scripts": {
     "start": "node app.js",
-    "test": "node tests/test.js"
+    "test": "node tests/test.js",
+    "update-libs": "npx npm-check-updates -u && npm install"
   },
   "dependencies": {
     "body-parser": "latest",
     "connect-busboy": "latest",
     "cron-parser": "latest",
     "cronstrue": "latest",
-    "ejs": "latest",
-    "express": "latest",
-    "express-basic-auth": "^1.2.0",
+    "ejs": "^3.1.10",
+    "express": "^5.1.0",
+    "express-basic-auth": "^1.2.1",
+    "lowdb": "^7.0.1",
     "mime-types": "latest",
-    "moment": "latest",
-    "nedb": "latest",
-    "nodemailer": "^6.7.3"
+    "moment": "^2.30.1",
+    "nodemailer": "^7.0.5"
   },
   "engines": {
     "node": ">=15.0.0"
@@ -36,5 +37,8 @@
     "node"
   ],
   "author": "Suresh Alse",
-  "license": "MIT"
+  "license": "MIT",
+  "devDependencies": {
+    "npm-check-updates": "^18.0.1"
+  }
 }

restore.js

@@ -1,18 +1,18 @@
 //load database
-var Datastore = require('nedb');
+const { Low, JSONFile } = require('lowdb');
 var crontab = require("./crontab");
 var path = require("path");
 var exec = require('child_process').exec;
 var fs = require('fs');
-exports.crontabs = function(db_name, callback){
-    var db = new Datastore({filename: path.join(crontab.db_folder, db_name)});
-    db.loadDatabase(function (err) {
-    });
-    db.find({}).sort({ created: -1 }).exec(function(err, docs){
-        callback(docs);
-    });
+exports.crontabs = async function(db_name, callback){
+    const adapter = new JSONFile(path.join(crontab.db_folder, db_name));
+    const db = new Low(adapter);
+    await db.read();
+    db.data = db.data || { crontabs: [] };
+    let docs = db.data.crontabs.slice().sort((a, b) => b.created - a.created);
+    callback(docs);
 };
 exports.delete = function(db_name){