chore: migrate backup scheduled mode from linux cron to go cron

This commit is contained in:
Jonas Kaninda
2024-09-28 07:26:33 +02:00
parent 29a58aa26d
commit cbb73ae89b
7 changed files with 137 additions and 117 deletions

View File

@@ -29,8 +29,9 @@ func init() {
//Backup //Backup
BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3") BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`") BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. default or scheduled") BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. | Deprecated")
BackupCmd.PersistentFlags().StringP("period", "", "", "Schedule period time") BackupCmd.PersistentFlags().StringP("period", "", "", "Schedule period time | Deprecated")
BackupCmd.PersistentFlags().StringP("cron-expression", "", "", "Backup cron expression")
BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled") BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled")
BackupCmd.PersistentFlags().IntP("keep-last", "", 7, "Delete files created more than specified days ago, default 7 days") BackupCmd.PersistentFlags().IntP("keep-last", "", 7, "Delete files created more than specified days ago, default 7 days")
BackupCmd.PersistentFlags().BoolP("disable-compression", "", false, "Disable backup compression") BackupCmd.PersistentFlags().BoolP("disable-compression", "", false, "Disable backup compression")

View File

@@ -8,126 +8,70 @@ package pkg
import ( import (
"fmt" "fmt"
"github.com/hpcloud/tail"
"github.com/jkaninda/mysql-bkup/utils" "github.com/jkaninda/mysql-bkup/utils"
"github.com/robfig/cron/v3"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"log" "log"
"os" "os"
"os/exec" "os/exec"
"path/filepath" "path/filepath"
"time"
) )
func StartBackup(cmd *cobra.Command) { func StartBackup(cmd *cobra.Command) {
intro() intro()
//Set env dbConf = initDbConfig(cmd)
utils.SetEnv("STORAGE_PATH", storagePath) //Initialize backup configs
utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION") config := initBackupConfig(cmd)
//Get flag value and set env
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
file = utils.GetEnv(cmd, "file", "FILE_NAME")
backupRetention, _ := cmd.Flags().GetInt("keep-last")
prune, _ := cmd.Flags().GetBool("prune")
disableCompression, _ = cmd.Flags().GetBool("disable-compression")
executionMode, _ = cmd.Flags().GetString("mode")
gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION")
dbConf = getDbConfig(cmd)
//
if gpqPassphrase != "" {
encryption = true
}
//Generate file name
backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20060102_150405"))
if disableCompression {
backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405"))
}
if cronExpression == "" {
switch storage {
case "s3":
s3Backup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
case "local":
localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
case "ssh", "remote":
sshBackup(dbConf, backupFileName, remotePath, disableCompression, prune, backupRetention, encryption)
case "ftp":
utils.Fatal("Not supported storage type: %s", storage)
default:
localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
}
if config.cronExpression == "" {
BackupTask(dbConf, config)
} else { } else {
if utils.IsValidCronExpression(cronExpression) { if utils.IsValidCronExpression(config.cronExpression) {
scheduledMode(dbConf, storage) scheduledMode(dbConf, config)
} else { } else {
utils.Fatal("Cron expression is not valid: %s", cronExpression) utils.Fatal("Cron expression is not valid: %s", config.cronExpression)
} }
} }
} }
// Run in scheduled mode // Run in scheduled mode
func scheduledMode(db *dbConfig, storage string) { func scheduledMode(db *dbConfig, config *BackupConfig) {
fmt.Println()
fmt.Println("**********************************")
fmt.Println(" Starting MySQL Bkup... ")
fmt.Println("***********************************")
utils.Info("Running in Scheduled mode") utils.Info("Running in Scheduled mode")
utils.Info("Execution period %s", os.Getenv("BACKUP_CRON_EXPRESSION")) utils.Info("Backup cron expression: %s", os.Getenv("BACKUP_CRON_EXPRESSION"))
utils.Info("Storage type %s ", storage) utils.Info("Storage type %s ", storage)
//Test database connection //Test database connection
testDatabaseConnection(db) testDatabaseConnection(db)
utils.Info("Creating backup job...") utils.Info("Creating a new cron instance...")
CreateCrontabScript(disableCompression, storage) // Create a new cron instance
c := cron.New()
//Set BACKUP_CRON_EXPRESSION to nil // Add a cron job that runs on the configured cron expression
err := os.Setenv("BACKUP_CRON_EXPRESSION", "") c.AddFunc(config.cronExpression, func() {
if err != nil { BackupTask(db, config)
return })
} // Start the cron scheduler
c.Start()
supervisorConfig := "/etc/supervisor/supervisord.conf" utils.Info("Creating a new cron instance...done")
defer c.Stop()
// Start Supervisor select {}
cmd := exec.Command("supervisord", "-c", supervisorConfig)
err = cmd.Start()
if err != nil {
utils.Fatal(fmt.Sprintf("Failed to start supervisord: %v", err))
}
utils.Info("Backup job started")
defer func() {
if err := cmd.Process.Kill(); err != nil {
utils.Info("Failed to kill supervisord process: %v", err)
} else {
utils.Info("Supervisor stopped.")
}
}()
if _, err := os.Stat(cronLogFile); os.IsNotExist(err) {
utils.Fatal(fmt.Sprintf("Log file %s does not exist.", cronLogFile))
}
t, err := tail.TailFile(cronLogFile, tail.Config{Follow: true})
if err != nil {
utils.Fatal("Failed to tail file: %v", err)
}
// Read and print new lines from the log file
for line := range t.Lines {
fmt.Println(line.Text)
}
} }
func intro() { func BackupTask(db *dbConfig, config *BackupConfig) {
utils.Info("Starting MySQL Backup...") utils.Info("Starting backup task...")
utils.Info("Copyright © 2024 Jonas Kaninda ") switch config.storage {
case "s3":
s3Backup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption)
case "local":
localBackup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption)
case "ssh", "remote":
sshBackup(db, config.backupFileName, config.remotePath, config.disableCompression, config.prune, config.backupRetention, config.encryption)
case "ftp":
utils.Fatal("Not supported storage type: %s", config.storage)
default:
localBackup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption)
}
} }
// BackupDatabase backup database // BackupDatabase backup database

View File

@@ -7,9 +7,11 @@
package pkg package pkg
import ( import (
"fmt"
"github.com/jkaninda/mysql-bkup/utils" "github.com/jkaninda/mysql-bkup/utils"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"os" "os"
"time"
) )
type Config struct { type Config struct {
@@ -30,7 +32,27 @@ type targetDbConfig struct {
targetDbName string targetDbName string
} }
func getDbConfig(cmd *cobra.Command) *dbConfig { type BackupConfig struct {
backupFileName string
backupRetention int
disableCompression bool
prune bool
encryption bool
remotePath string
gpqPassphrase string
storage string
cronExpression string
}
type RestoreConfig struct {
s3Path string
remotePath string
storage string
file string
bucket string
gpqPassphrase string
}
func initDbConfig(cmd *cobra.Command) *dbConfig {
//Set env //Set env
utils.GetEnv(cmd, "dbname", "DB_NAME") utils.GetEnv(cmd, "dbname", "DB_NAME")
dConf := dbConfig{} dConf := dbConfig{}
@@ -47,7 +69,66 @@ func getDbConfig(cmd *cobra.Command) *dbConfig {
} }
return &dConf return &dConf
} }
func getTargetDbConfig() *targetDbConfig { func initBackupConfig(cmd *cobra.Command) *BackupConfig {
utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")
//Get flag value and set env
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
backupRetention, _ := cmd.Flags().GetInt("keep-last")
prune, _ := cmd.Flags().GetBool("prune")
disableCompression, _ = cmd.Flags().GetBool("disable-compression")
_, _ = cmd.Flags().GetString("mode")
gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION")
if gpqPassphrase != "" {
encryption = true
}
//Generate file name
backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20240102_150405"))
if disableCompression {
backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20240102_150405"))
}
//Initialize backup configs
config := BackupConfig{}
config.backupFileName = backupFileName
config.backupRetention = backupRetention
config.disableCompression = disableCompression
config.prune = prune
config.storage = storage
config.encryption = encryption
config.remotePath = remotePath
config.gpqPassphrase = gpqPassphrase
config.cronExpression = cronExpression
return &config
}
// initRestoreConfig reads the restore-related flags and environment variables
// and assembles them into a RestoreConfig. It also mutates the package-level
// storage and file variables as a side effect.
func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
	utils.SetEnv("STORAGE_PATH", storagePath)
	//Get flag value and set env
	s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
	remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
	storage = utils.GetEnv(cmd, "storage", "STORAGE")
	file = utils.GetEnv(cmd, "file", "FILE_NAME")
	// "mode" is deprecated; read and discard it so old invocations keep working.
	_, _ = cmd.Flags().GetString("mode")
	bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
	gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
	//Initialize restore configs
	rConfig := RestoreConfig{}
	rConfig.s3Path = s3Path
	rConfig.remotePath = remotePath
	rConfig.storage = storage
	rConfig.bucket = bucket
	rConfig.file = file
	rConfig.gpqPassphrase = gpqPassphrase
	return &rConfig
}
func initTargetDbConfig() *targetDbConfig {
tdbConfig := targetDbConfig{} tdbConfig := targetDbConfig{}
tdbConfig.targetDbHost = os.Getenv("TARGET_DB_HOST") tdbConfig.targetDbHost = os.Getenv("TARGET_DB_HOST")
tdbConfig.targetDbPort = os.Getenv("TARGET_DB_PORT") tdbConfig.targetDbPort = os.Getenv("TARGET_DB_PORT")

View File

@@ -125,3 +125,7 @@ func testDatabaseConnection(db *dbConfig) {
utils.Info("Successfully connected to %s database", db.dbName) utils.Info("Successfully connected to %s database", db.dbName)
} }
func intro() {
utils.Info("Starting MySQL Backup...")
utils.Info("Copyright © 2024 Jonas Kaninda ")
}

View File

@@ -17,8 +17,8 @@ func StartMigration(cmd *cobra.Command) {
intro() intro()
utils.Info("Starting database migration...") utils.Info("Starting database migration...")
//Get DB config //Get DB config
dbConf = getDbConfig(cmd) dbConf = initDbConfig(cmd)
targetDbConf = getTargetDbConfig() targetDbConf = initTargetDbConfig()
//Defining the target database variables //Defining the target database variables
newDbConfig := dbConfig{} newDbConfig := dbConfig{}

View File

@@ -17,33 +17,24 @@ import (
func StartRestore(cmd *cobra.Command) { func StartRestore(cmd *cobra.Command) {
intro() intro()
//Set env dbConf = initDbConfig(cmd)
utils.SetEnv("STORAGE_PATH", storagePath) restoreConf := initRestoreConfig(cmd)
//Get flag value and set env
s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
file = utils.GetEnv(cmd, "file", "FILE_NAME")
executionMode, _ = cmd.Flags().GetString("mode")
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
dbConf = getDbConfig(cmd)
switch storage { switch storage {
case "s3": case "s3":
restoreFromS3(dbConf, file, bucket, s3Path) restoreFromS3(dbConf, restoreConf.file, restoreConf.bucket, restoreConf.s3Path)
case "local": case "local":
utils.Info("Restore database from local") utils.Info("Restore database from local")
copyToTmp(storagePath, file) copyToTmp(storagePath, restoreConf.file)
RestoreDatabase(dbConf, file) RestoreDatabase(dbConf, restoreConf.file)
case "ssh": case "ssh":
restoreFromRemote(dbConf, file, remotePath) restoreFromRemote(dbConf, restoreConf.file, restoreConf.remotePath)
case "ftp": case "ftp":
utils.Fatal("Restore from FTP is not yet supported") utils.Fatal("Restore from FTP is not yet supported")
default: default:
utils.Info("Restore database from local") utils.Info("Restore database from local")
copyToTmp(storagePath, file) copyToTmp(storagePath, restoreConf.file)
RestoreDatabase(dbConf, file) RestoreDatabase(dbConf, restoreConf.file)
} }
} }

View File

@@ -16,7 +16,6 @@ const gpgExtension = "gpg"
var ( var (
storage = "local" storage = "local"
file = "" file = ""
executionMode = "default"
storagePath = "/backup" storagePath = "/backup"
disableCompression = false disableCompression = false
encryption = false encryption = false