Mirror of https://github.com/jkaninda/mysql-bkup.git, synced 2025-12-06 13:39:41 +01:00
chore: migrate backup scheduled mode from linux cron to go cron
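The hunks below show only the configuration side of the change: BackupConfig gains a cronExpression field populated from BACKUP_CRON_EXPRESSION (or the period flag), which the scheduled mode can hand to an in-process Go cron scheduler instead of installing a Linux crontab entry. The diff does not show which scheduler library the project adopted; what follows is a minimal sketch of the pattern, assuming robfig/cron/v3 and a hypothetical runBackup helper, not the project's actual scheduler code.

    package main

    import (
        "log"
        "os"

        "github.com/robfig/cron/v3" // assumed library; the actual dependency is not shown in this diff
    )

    // runBackup is a hypothetical stand-in for the project's backup routine.
    func runBackup() {
        log.Println("running scheduled backup")
    }

    func main() {
        expr := os.Getenv("BACKUP_CRON_EXPRESSION") // e.g. "0 1 * * *"
        if expr == "" {
            runBackup() // no schedule configured: run once and exit
            return
        }
        c := cron.New()
        // AddFunc validates the expression and registers the job.
        if _, err := c.AddFunc(expr, runBackup); err != nil {
            log.Fatalf("invalid BACKUP_CRON_EXPRESSION %q: %v", expr, err)
        }
        c.Start()
        select {} // block forever so the in-process scheduler keeps firing
    }

Compared with writing a crontab entry, this keeps scheduling inside the container's single process, which is usually the motivation for this kind of migration.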
@@ -7,9 +7,11 @@
package pkg

import (
    "fmt"
    "github.com/jkaninda/mysql-bkup/utils"
    "github.com/spf13/cobra"
    "os"
    "time"
)

type Config struct {
@@ -30,7 +32,27 @@ type targetDbConfig struct {
    targetDbName string
}

func getDbConfig(cmd *cobra.Command) *dbConfig {
type BackupConfig struct {
    backupFileName     string
    backupRetention    int
    disableCompression bool
    prune              bool
    encryption         bool
    remotePath         string
    gpqPassphrase      string
    storage            string
    cronExpression     string
}
type RestoreConfig struct {
    s3Path        string
    remotePath    string
    storage       string
    file          string
    bucket        string
    gpqPassphrase string
}

func initDbConfig(cmd *cobra.Command) *dbConfig {
    //Set env
    utils.GetEnv(cmd, "dbname", "DB_NAME")
    dConf := dbConfig{}
@@ -47,7 +69,66 @@ func getDbConfig(cmd *cobra.Command) *dbConfig {
    }
    return &dConf
}
func getTargetDbConfig() *targetDbConfig {
func initBackupConfig(cmd *cobra.Command) *BackupConfig {
    utils.SetEnv("STORAGE_PATH", storagePath)
    utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")

    //Get flag value and set env
    remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
    storage = utils.GetEnv(cmd, "storage", "STORAGE")
    backupRetention, _ := cmd.Flags().GetInt("keep-last")
    prune, _ := cmd.Flags().GetBool("prune")
    disableCompression, _ = cmd.Flags().GetBool("disable-compression")
    _, _ = cmd.Flags().GetString("mode")
    gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
    _ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
    cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION")

    if gpqPassphrase != "" {
        encryption = true
    }
    //Generate file name
    backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20060102_150405"))
    if disableCompression {
        backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405"))
    }

    //Initialize backup configs
    config := BackupConfig{}
    config.backupFileName = backupFileName
    config.backupRetention = backupRetention
    config.disableCompression = disableCompression
    config.prune = prune
    config.storage = storage
    config.encryption = encryption
    config.remotePath = remotePath
    config.gpqPassphrase = gpqPassphrase
    config.cronExpression = cronExpression
    return &config
}
func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
    utils.SetEnv("STORAGE_PATH", storagePath)

    //Get flag value and set env
    s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
    remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
    storage = utils.GetEnv(cmd, "storage", "STORAGE")
    file = utils.GetEnv(cmd, "file", "FILE_NAME")
    _, _ = cmd.Flags().GetString("mode")
    bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
    gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
    //Initialize restore configs
    rConfig := RestoreConfig{}
    rConfig.s3Path = s3Path
    rConfig.remotePath = remotePath
    rConfig.storage = storage
    rConfig.bucket = bucket
    rConfig.file = file
    rConfig.gpqPassphrase = gpqPassphrase
    return &rConfig
}
func initTargetDbConfig() *targetDbConfig {
    tdbConfig := targetDbConfig{}
    tdbConfig.targetDbHost = os.Getenv("TARGET_DB_HOST")
    tdbConfig.targetDbPort = os.Getenv("TARGET_DB_PORT")
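A note on the timestamped file names above: time.Now().Format takes a layout written in terms of Go's fixed reference time (Mon Jan 2 15:04:05 2006), so the layout for names like mydb_20251206_133941.sql.gz is "20060102_150405"; layouts that do not spell out the reference time yield garbled timestamps. A self-contained example:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // The layout spells the reference time 2006-01-02 15:04:05 in the desired shape.
        name := fmt.Sprintf("%s_%s.sql.gz", "mydb", time.Now().Format("20060102_150405"))
        fmt.Println(name) // e.g. mydb_20251206_133941.sql.gz
    }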