summaryrefslogtreecommitdiff
path: root/daemons/jobs.js
diff options
context:
space:
mode:
authorRaindropsSys <raindrops@equestria.dev>2024-03-30 23:40:33 +0100
committerRaindropsSys <raindrops@equestria.dev>2024-03-30 23:40:33 +0100
commit6b796258d413f00e498ce7f80f73a9f6c061f29c (patch)
tree49e64a5dd4cde2acff7f0a93ed3f8e20e1cb2dc8 /daemons/jobs.js
parent5860551daa0f60103ad24e93da29f401a653f144 (diff)
downloadpluralconnect-6b796258d413f00e498ce7f80f73a9f6c061f29c.tar.gz
pluralconnect-6b796258d413f00e498ce7f80f73a9f6c061f29c.tar.bz2
pluralconnect-6b796258d413f00e498ce7f80f73a9f6c061f29c.zip
Updated 5 files, added 2 files, deleted 495 files and renamed 7 files (automated)
Diffstat (limited to 'daemons/jobs.js')
-rw-r--r--daemons/jobs.js110
1 files changed, 110 insertions, 0 deletions
diff --git a/daemons/jobs.js b/daemons/jobs.js
new file mode 100644
index 0000000..0ca8552
--- /dev/null
+++ b/daemons/jobs.js
@@ -0,0 +1,110 @@
const fs = require('fs');
const child_process = require('child_process');

// File names seen in ../data/jobs on the last directory scan; compared
// against a fresh listing to detect queue changes.
let jobsList = [];
// Parsed job objects pending execution; each carries its source file name
// in "_id" so the file can be deleted after the run.
let jobs = [];
// Rolling log of job outcomes, persisted to ../data/history.json and
// capped at 200 entries when written back.
let history = require('../data/history.json');

// Reset the "currently running job" marker on daemon start-up.
fs.writeFileSync("../data/running.json", "null");
+
// Poll the jobs directory once per second; when the set of job files
// changes, reload every job definition into memory.
setInterval(() => {
    // FIX: read the directory exactly once per tick. The original read it
    // three times, so the change check, the reload, and the stored snapshot
    // could each see a different directory state (read/read race).
    const entries = fs.readdirSync("../data/jobs");

    if (JSON.stringify(entries) !== JSON.stringify(jobsList)) {
        console.log("Updating the jobs list...");
        jobs = entries.map((i) => {
            // Each job file is a JSON document; remember its file name as
            // "_id" so the worker can unlink it after execution.
            let obj = JSON.parse(fs.readFileSync("../data/jobs/" + i).toString());
            obj["_id"] = i;
            return obj;
        });
        jobsList = entries;
    }
}, 1000);
+
// Render a job's options object as "name=value,name=value" for the history
// entry label. FIX: the original loop assigned instead of appending, so only
// the last option ever appeared in the label.
function formatOptions(options) {
    return Object.keys(options)
        .map((name) => name + "=" + JSON.stringify(options[name]))
        .join(",");
}

// Start the worker loop shortly after boot so the directory watcher has had
// a chance to populate `jobs` first.
setTimeout(() => {
    setInterval(() => {
        if (jobs.length > 0) {
            let pickup = new Date();

            console.log(jobs.length + " job(s)");
            console.log("\nRunning jobs:");

            for (let job of jobs) {
                console.log(" " + job.name + " [" + job._id + "]");
                let output;
                let start;
                let end;

                // Publish which job is currently running.
                fs.writeFileSync("../data/running.json", JSON.stringify(job._id));

                try {
                    start = new Date();
                    // Jobs are PHP scripts in ../jobs named after the job;
                    // options are passed as a single JSON argument.
                    output = child_process.execFileSync("php", [job.name + ".php", JSON.stringify(job.options)], { cwd: "../jobs" });
                    end = new Date();

                    fs.unlinkSync("../data/jobs/" + job._id);

                    history.unshift({
                        completed: true,
                        error: null,
                        options: job.options,
                        name: job.name + "(" + formatOptions(job.options) + ")",
                        output: output.toString(),
                        time: end.getTime() - start.getTime(),
                        tracking: {
                            queue: new Date(job.date).toISOString(),
                            pickup: pickup.toISOString(),
                            start: start.toISOString(),
                            end: end.toISOString(),
                            logged: new Date().toISOString()
                        }
                    });
                } catch (e) {
                    end = start ? new Date() : null;
                    console.log(" Failed to process job");
                    console.error(e);

                    // FIX: the original unlink here was unguarded; if the job
                    // file was already gone (e.g. re-run race) the throw
                    // escaped the interval callback and crashed the daemon.
                    try {
                        fs.unlinkSync("../data/jobs/" + job._id);
                    } catch (unlinkError) {
                        console.error(unlinkError);
                    }

                    history.unshift({
                        completed: false,
                        error: e.stack,
                        options: job.options,
                        name: job.name + "(" + formatOptions(job.options) + ")",
                        // FIX: all three sources can be nullish (e.g. the php
                        // binary is missing, so the spawn itself failed);
                        // fall back to "" instead of crashing on .toString().
                        output: (output ?? e.stdout ?? e.stderr ?? "").toString(),
                        time: end && start ? end.getTime() - start.getTime() : null,
                        tracking: {
                            queue: new Date(job.date).toISOString(),
                            pickup: pickup.toISOString(),
                            start: start ? start.toISOString() : null,
                            end: end ? end.toISOString() : null,
                            logged: new Date().toISOString()
                        }
                    });
                } finally {
                    // Persist the (capped) history and clear the running
                    // marker regardless of outcome — this de-duplicates the
                    // identical bookkeeping the original repeated in both
                    // the success and failure paths.
                    history = history.slice(0, 200);
                    fs.writeFileSync("../data/history.json", JSON.stringify(history));
                    fs.writeFileSync("../data/running.json", "null");
                }
            }

            // FIX: drop processed jobs immediately. Their files are already
            // unlinked, but the original kept them in memory until the
            // watcher's next tick, so the next worker tick could re-execute
            // them (double-run race).
            jobs = [];

            console.log("\nCompleted");
        }
    }, 1000);
}, 500);