diff --git a/.gitignore b/.gitignore index e2a7107..9199f11 100644 --- a/.gitignore +++ b/.gitignore @@ -122,5 +122,6 @@ dist/* !dist/.keep release/* !release/.keep +exports package-lock.json config.json diff --git a/README.md b/README.md index 6fc3147..52700fa 100644 --- a/README.md +++ b/README.md @@ -8,9 +8,9 @@ This supports mailing for logging what is happening to the databases, future pla ### Features -- [ ] Database Exporting +- [x] Database Exporting - [ ] Log all errors to a final log -- [ ] Email the log through email +- [x] Email the log through email - [ ] Discord WebHook - [ ] Telegram Hook - [ ] Docker Support diff --git a/config.example.json b/config.example.json index 2989d2b..e200bc3 100644 --- a/config.example.json +++ b/config.example.json @@ -3,7 +3,9 @@ "parallel_nodes": 2, "parallel_node_tasks": 5, "separate_into_tables": false, - "mysql_dump_settings": "--no-comments" + "mysql_dump_settings": "--no-comments", + "database_export_path": "./exports", + "logs_path": "./logs" }, "smtp": { @@ -12,7 +14,8 @@ "ssl": true, "username": "", "password": "", - "email_from": "" + "email_from": "", + "to": "" }, "nodes": [ diff --git a/src/app.ts b/src/app.ts index a326c79..ae6a401 100644 --- a/src/app.ts +++ b/src/app.ts @@ -4,6 +4,11 @@ import { getConfig, NodeConnection } from "./helper/config"; import { SystemControl } from "./models/SystemController"; // Import the worker processes import { DataWorker } from "./models/WorkerProcess"; +// Import the mail model +import { sendMail } from "./models/MailModel"; +// Import the file system library +import * as fs from "fs"; +import * as path from "path"; // Global system control singleton ((global as any).logger as SystemControl) = new SystemControl(); @@ -21,7 +26,7 @@ async function main() { // Spawn in the DataWorkers for ( let i: number = 0; i < config.options.parallel_nodes; i++ ) { // Create a new data worker - workerNodes.push(new DataWorker(config.options.parallel_node_tasks)); + workerNodes.push(new 
DataWorker(config.options.parallel_node_tasks, config.options.database_export_path)); } /** @@ -30,7 +35,6 @@ async function main() { let workerIndex = 0; // Start assigning nodes to the DataWorkers for ( let node of config.nodes ) { - console.log(`Adding workerIndex[${workerIndex}]`); // Add the task to the node workerNodes[workerIndex].addNodeTask(node); // Increment the worker index @@ -38,18 +42,49 @@ if ( workerIndex >= workerNodes.length ) workerIndex = 0; } - console.log(`Succesfully added ${config.nodes.length} Tasks to ${workerNodes.length} Workers`); + const responseLogs: Map<string, string[]> = new Map(); - for( let w of workerNodes ) { - let resolves: PromiseSettledResult[] = await w.startProcessing(); + // @TODO: Actually make these run in parallel + for ( let w of workerNodes ) { - for( let r of resolves ) { - const reason = (r as any).reason; + const workerStartTime = new Date().getTime(); + let resolves: PromiseSettledResult[] | string = await w.startProcessing(); + const workerEndTime: string = Number((new Date().getTime() - workerStartTime) / 1000).toFixed(2) + "s" + let myLog: string[] = [ + `runTime: ${workerEndTime}`, + ]; - console.log(w.getID(), r); + for ( let r of resolves ) { + myLog.push(JSON.stringify(r, null, 2)); } + responseLogs.set(w.getID(), myLog); + } + + // Log to file + if ( !!config.options.logs_path ) { + // Log to a JSON file path + const logFilePath: string = path.join(config.options.logs_path, new Date().getTime() + ".json"); + const dirPath = path.dirname(logFilePath); + // Create the directory if it doesn't exist. + if ( !fs.existsSync(dirPath) ) { fs.mkdirSync(dirPath); } + + // Write to file + fs.writeFileSync(logFilePath, JSON.stringify(Object.fromEntries(responseLogs))); + } + + // Send the log via email + if ( !!config.smtp ) { + let htmlContent = `

Database Log Report


`; + + responseLogs.forEach( (value, key) => { + htmlContent += `
${key}
` + htmlContent += `${JSON.stringify(value, null, 4)}` + htmlContent += "
"; + }) + + sendMail(htmlContent, config.smtp.to); } } diff --git a/src/helper/config.ts b/src/helper/config.ts index 545cb43..bfc56e2 100644 --- a/src/helper/config.ts +++ b/src/helper/config.ts @@ -27,6 +27,16 @@ export interface ConfigOptions { * A string of CLI options for the mysql dump command */ mysql_dump_settings: string, + + /** + * Path to where to export the database to + */ + database_export_path: string, + + /** + * Path to the logs file + */ + logs_path?: string } /** @@ -58,6 +68,11 @@ export interface SMTPConfig { * Secure Connection */ ssl : boolean, + + /** + * Main target email address + */ + to : string, } export interface NodeConnectionMail { @@ -139,7 +154,7 @@ export interface ConfigObject { * SMTP details for sending out email notifications * for real-time updates on erros, health, and backups. */ - smtp: SMTPConfig, + smtp?: SMTPConfig, /** * An array of Node connections that we should backup diff --git a/src/models/MailModel.ts b/src/models/MailModel.ts index a7d84fd..41b6e4d 100644 --- a/src/models/MailModel.ts +++ b/src/models/MailModel.ts @@ -11,12 +11,12 @@ function getTransporter() : nodemailer.Transporter { // Generate the transporter const transporter = nodemailer.createTransport({ - host: config?.smtp.host, - port: config?.smtp.port, - secure: config?.smtp.ssl, + host: config?.smtp?.host, + port: config?.smtp?.port, + secure: config?.smtp?.ssl, auth: { - user: config?.smtp.username, - pass: config?.smtp.password, + user: config?.smtp?.username, + pass: config?.smtp?.password, } }); @@ -27,22 +27,28 @@ function getTransporter() : nodemailer.Transporter { /** * Send out an email with the config details. 
*/ -export async function sendMail(mailContent: string, toAddress: string, nodeConfig: NodeConnectionMail, ccAddresses?: Array) { +export async function sendMail(mailContent: string, toAddress: string, ccAddresses: Array = []) { // Get the transporter const transporter = getTransporter(); // Generate the mail options const mailOptions : nodemailer.SendMailOptions = { - from: config?.smtp.email_from, - to: nodeConfig.email_to, - cc: nodeConfig.email_cc, + subject: "Database Backup Update", + from: config?.smtp?.email_from, + to: toAddress, + cc: ccAddresses, html: mailContent, } await new Promise((_r, _e) => { transporter.sendMail(mailOptions, (err, info) => { - + if ( err ) { + console.error(err); + } + + console.log(info); + }) }) diff --git a/src/models/WorkerProcess.ts b/src/models/WorkerProcess.ts index f36ae2f..2615f3f 100644 --- a/src/models/WorkerProcess.ts +++ b/src/models/WorkerProcess.ts @@ -1,7 +1,20 @@ import * as mysql from "mysql"; import { NodeConnection } from "../helper/config"; import { uuid } from "../helper/mainHelper"; +import * as child from 'child_process'; +import * as path from "path"; +import * as fs from "fs"; +export enum databaseExportLogType { + ERROR = "Error", + SUCCESS = "Success", +} + +export interface databaseExportLog { + data: any, + type: databaseExportLogType, + error?: Error, +} export class DataWorker { @@ -15,15 +28,20 @@ export class DataWorker { */ private parallelTasksCount: number; + private exportPath: string; + private id: string; /** * @param parallelTasks The amount of parallel tasks that this Worker can run at once */ - constructor(parallelTasks: number) { + constructor(parallelTasks: number, exportPath: string) { // Initialize the amount of paralel tasks to run at the same time this.parallelTasksCount = parallelTasks; + // Assign the export path + this.exportPath = exportPath; + // Generate an ID for the worker this.id = uuid(); } @@ -41,9 +59,14 @@ export class DataWorker { /** * Start processing the node tasks. 
*/ - public startProcessing() : Promise[]> { + public startProcessing() : Promise[] | string> { return new Promise( async (_r, _e) => { try { + + if ( this.nodeTasks.length <= 0 ) { + return _r("No tasks to run, skipping"); + } + // Go through all of the tasks to run for ( let i = 0; i < this.nodeTasks.length; i++ ) { // Spawn in the task queue @@ -77,7 +100,7 @@ export class DataWorker { * @param task The task to process */ private processTask(task: NodeConnection) { - return new Promise( async (_r, _e) => { + return new Promise( async (_r, _e) => { // Connect to the mysql database to test out the connection const conn : mysql.Connection = mysql.createConnection({ host: task.hostname, @@ -102,9 +125,11 @@ export class DataWorker { return _e(error); } + let results: databaseExportLog[] = []; + try { // Dump the database - await this.dumpDatabase(conn, task); + results = await this.dumpDatabase(conn, task); } catch ( err ) { conn.destroy(); return _e(err); @@ -120,18 +145,120 @@ export class DataWorker { } catch ( err ) {}; // Stop the code and mark as success - _r(`Succesfully ran task ${task.name}`); + _r(results); }) } private dumpDatabase(conn : mysql.Connection, task: NodeConnection) { - return new Promise((_r, _e) => { + return new Promise( async (_r, _e) => { // Get all of the databases from the connection - let databaseList: string[] = []; + const databaseList: string[] = []; + let rawDatabaseList: string[] = []; - + try { + rawDatabaseList = await new Promise((res, er) => { + conn.query("SHOW DATABASES", (err, rez, fields) => { + if ( err ) { + // Could not list databases, there's definitely something wrong going to happen, do a forceful stop. 
+ return _e(err); + } + let databaseList: string[] = []; + + for ( let db of rez ) { + databaseList.push(db['Database']); + } + + res(databaseList); + }) + + }); + } catch ( ex ) { + return _e(ex); + } + + // Filter the database list based on the blacklist/whitelist + for ( let db of rawDatabaseList ) { + if ( !!task.databases ) { + + // Check the whitelist + if ( !!task.databases.whitelist ) { + if ( !task.databases.whitelist?.includes(db) ) { + continue; + } + } + + // Check the blacklist + if ( !!task.databases.blacklist ) { + if ( task.databases.blacklist?.includes(db) ) { + continue; + } + } + } + + // The database passed our filters, add it to the final list! + databaseList.push(db); + } + + // List of logs for the different database dumps + const resultLogs: databaseExportLog[] = []; + + // Run the mysqldump for every database + for ( let db of databaseList ) { + let startTime = new Date().getTime(); + try { + await new Promise((res, err) => { + // Get the path to export to + const exportPath: string = path.join( this.exportPath , `${db}.sql`); + // Make sure the folder exists + if ( !fs.existsSync(path.dirname(exportPath)) ) { + fs.mkdirSync(path.dirname(exportPath)); + } + + // Generate the export command + const exportCommand: string = `mysqldump -u${task.username} -p${task.password} -h${task.hostname} -P${task.port} ${db} > ${exportPath}`; + // Execute the export process + child.exec(exportCommand, (execErr, stdout, stderr) => { + if ( execErr ) { + + resultLogs.push({ + type: databaseExportLogType.ERROR, + data: { + runTime: (new Date().getTime() - startTime) + "ms", + stderr: stderr + }, + error: execErr, + }); + + return res(); + } + + resultLogs.push({ + type: databaseExportLogType.SUCCESS, + data: { + runTime: (new Date().getTime() - startTime) + "ms", + stdout: stdout, + }, + }); + + res(); + }); + + }); + } catch ( error ) { + resultLogs.push({ + type: databaseExportLogType.ERROR, + data: { + runTime: (new Date().getTime() - startTime) + "ms", + 
}, + error: error + }); + } + } + + // Resolve the promise + _r(resultLogs); }) }