From e4d2d69fa04c8573588f4a101863dda025292033 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sat, 28 Sep 2024 09:20:35 +0200 Subject: [PATCH 01/13] chore: migrate backup scheduled mode from linux cron to go cron --- cmd/backup.go | 3 +- docker/Dockerfile | 5 +- go.mod | 2 +- go.sum | 2 + pkg/backup.go | 127 ++++++++++++++++------------------------------ pkg/config.go | 79 +++++++++++++++++++++++++++- pkg/migrate.go | 4 +- pkg/restore.go | 28 ++++------ utils/utils.go | 5 ++ 9 files changed, 145 insertions(+), 110 deletions(-) diff --git a/cmd/backup.go b/cmd/backup.go index 575ec33..d904a2f 100644 --- a/cmd/backup.go +++ b/cmd/backup.go @@ -30,7 +30,8 @@ func init() { BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3") BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`") BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. default or scheduled") - BackupCmd.PersistentFlags().StringP("period", "", "0 1 * * *", "Schedule period time") + BackupCmd.PersistentFlags().StringP("period", "", "", "Schedule period time | Deprecated") + BackupCmd.PersistentFlags().StringP("cron-expression", "", "", "Backup cron expression") BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled") BackupCmd.PersistentFlags().IntP("keep-last", "", 7, "Delete files created more than specified days ago, default 7 days") BackupCmd.PersistentFlags().BoolP("disable-compression", "", false, "Disable backup compression") diff --git a/docker/Dockerfile b/docker/Dockerfile index 4e5415c..4b62dcf 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -36,7 +36,7 @@ ENV TARGET_DB_NAME="" ENV TARGET_DB_USERNAME="" ENV TARGET_DB_PASSWORD="" ARG DEBIAN_FRONTEND=noninteractive -ENV VERSION="v1.2.5" +ENV VERSION="v1.2.8" ENV BACKUP_CRON_EXPRESSION="" ENV TG_TOKEN="" ENV TG_CHAT_ID="" @@ -50,7 +50,7 @@ LABEL author="Jonas Kaninda" RUN apt-get update -qq -RUN apt install postgresql-client supervisor cron gnupg -y +RUN apt install postgresql-client cron gnupg -y # Clear cache RUN apt-get clean && rm -rf /var/lib/apt/lists/* @@ -71,7 +71,6 @@ RUN chmod +x /usr/local/bin/pg-bkup RUN ln -s /usr/local/bin/pg-bkup /usr/local/bin/bkup -ADD docker/supervisord.conf /etc/supervisor/supervisord.conf # Create the backup script and make it executable RUN echo '#!/bin/sh\n/usr/local/bin/pg-bkup backup "$@"' > /usr/local/bin/backup && \ chmod +x /usr/local/bin/backup diff --git a/go.mod b/go.mod index 9700d93..b9683c2 100644 --- a/go.mod +++ b/go.mod @@ -12,9 +12,9 @@ require ( github.com/bramvdbogaerde/go-scp v1.5.0 // indirect github.com/hpcloud/tail v1.0.0 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/robfig/cron/v3 v3.0.1 // indirect golang.org/x/crypto v0.18.0 // indirect golang.org/x/sys v0.22.0 // indirect gopkg.in/fsnotify.v1 v1.4.7 // indirect gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect ) - diff --git a/go.sum b/go.sum index 8671785..e079d64 100644 --- a/go.sum +++ b/go.sum @@ -29,6 +29,8 @@ github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEp github.com/minio/minio-go/v7 v7.0.74 h1:fTo/XlPBTSpo3BAMshlwKL5RspXRv9us5UeHEGYCFe0= github.com/minio/minio-go/v7 v7.0.74/go.mod h1:qydcVzV8Hqtj1VtEocfxbmVFa2siu6HGa+LDEPogjD8= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/robfig/cron/v3 v3.0.1 
h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= +github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= diff --git a/pkg/backup.go b/pkg/backup.go index 7d65e31..b243fa5 100644 --- a/pkg/backup.go +++ b/pkg/backup.go @@ -8,8 +8,8 @@ package pkg import ( "fmt" - "github.com/hpcloud/tail" "github.com/jkaninda/pg-bkup/utils" + "github.com/robfig/cron/v3" "github.com/spf13/cobra" "log" "os" @@ -20,104 +20,67 @@ import ( func StartBackup(cmd *cobra.Command) { intro() - //Set env - utils.SetEnv("STORAGE_PATH", storagePath) - utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION") + dbConf = initDbConfig(cmd) + //Initialize backup configs + config := initBackupConfig(cmd) - //Get flag value and set env - remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH") - storage = utils.GetEnv(cmd, "storage", "STORAGE") - file = utils.GetEnv(cmd, "file", "FILE_NAME") - backupRetention, _ := cmd.Flags().GetInt("keep-last") - prune, _ := cmd.Flags().GetBool("prune") - disableCompression, _ = cmd.Flags().GetBool("disable-compression") - executionMode, _ = cmd.Flags().GetString("mode") - gpgPassphrase := os.Getenv("GPG_PASSPHRASE") - _ = utils.GetEnv(cmd, "path", "AWS_S3_PATH") - - dbConf = getDbConfig(cmd) - - // - if gpgPassphrase != "" { - encryption = true - } - - //Generate file name - backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20060102_150405")) - if disableCompression { - backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405")) - } - - if executionMode == "default" { - switch storage { - case "s3": - s3Backup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption) - case "local": - localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption) - case "ssh", "remote": - sshBackup(dbConf, backupFileName, remotePath, disableCompression, prune, backupRetention, encryption) - case "ftp": - utils.Fatal("Not supported storage type: %s", storage) - default: - localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption) - } - - } else if executionMode == "scheduled" { - scheduledMode(dbConf, storage) + if config.cronExpression == "" { + BackupTask(dbConf, config) } else { - utils.Fatal("Error, unknown execution mode!") + if utils.IsValidCronExpression(config.cronExpression) { + scheduledMode(dbConf, config) + } else { + utils.Fatal("Cron expression is not valid: %s", config.cronExpression) + } } } // Run in scheduled mode -func scheduledMode(db *dbConfig, storage string) { - - fmt.Println() - fmt.Println("**********************************") - fmt.Println(" Starting PostgreSQL Bkup... 
") - fmt.Println("***********************************") +func scheduledMode(db *dbConfig, config *BackupConfig) { utils.Info("Running in Scheduled mode") - utils.Info("Execution period %s ", os.Getenv("BACKUP_CRON_EXPRESSION")) - utils.Info("Storage type %s ", storage) + utils.Info("Backup cron expression: %s", config.cronExpression) + utils.Info("Storage type %s ", config.storage) //Test database connexion testDatabaseConnection(db) - utils.Info("Creating backup job...") - CreateCrontabScript(disableCompression, storage) + utils.Info("Creating cron job...") + // Create a new cron instance + c := cron.New() - supervisorConfig := "/etc/supervisor/supervisord.conf" - - // Start Supervisor - cmd := exec.Command("supervisord", "-c", supervisorConfig) - err := cmd.Start() + _, err := c.AddFunc(config.cronExpression, func() { + BackupTask(db, config) + }) if err != nil { - utils.Fatal("Failed to start supervisord: %v", err) + return } - utils.Info("Backup job started") - - defer func() { - if err := cmd.Process.Kill(); err != nil { - utils.Info("Failed to kill supervisord process: %v", err) - } else { - utils.Info("Supervisor stopped.") - } - }() - - if _, err := os.Stat(cronLogFile); os.IsNotExist(err) { - utils.Fatal(fmt.Sprintf("Log file %s does not exist.", cronLogFile)) + // Start the cron scheduler + c.Start() + utils.Info("Creating cron job...done") + defer c.Stop() + select {} +} +func BackupTask(db *dbConfig, config *BackupConfig) { + utils.Info("Starting backup task...") + //Generate file name + backupFileName := fmt.Sprintf("%s_%s.sql.gz", db.dbName, time.Now().Format("20240102_150405")) + if config.disableCompression { + backupFileName = fmt.Sprintf("%s_%s.sql", db.dbName, time.Now().Format("20240102_150405")) } - t, err := tail.TailFile(cronLogFile, tail.Config{Follow: true}) - if err != nil { - utils.Fatal("Failed to tail file: %v", err) + config.backupFileName = backupFileName + switch config.storage { + case "s3": + s3Backup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption) + case "local": + localBackup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption) + case "ssh", "remote": + sshBackup(db, config.backupFileName, config.remotePath, config.disableCompression, config.prune, config.backupRetention, config.encryption) + case "ftp": + utils.Fatal("Not supported storage type: %s", config.storage) + default: + localBackup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption) } - - // Read and print new lines from the log file - for line := range t.Lines { - fmt.Println(line.Text) - } - } func intro() { utils.Info("Starting PostgreSQL Backup...") diff --git a/pkg/config.go b/pkg/config.go index 610bbb7..f269509 100644 --- a/pkg/config.go +++ b/pkg/config.go @@ -33,8 +33,19 @@ type TgConfig struct { Token string ChatId string } +type BackupConfig struct { + backupFileName string + backupRetention int + disableCompression bool + prune bool + encryption bool + remotePath string + gpqPassphrase string + storage string + cronExpression string +} -func getDbConfig(cmd *cobra.Command) *dbConfig { +func initDbConfig(cmd *cobra.Command) *dbConfig { //Set env utils.GetEnv(cmd, "dbname", "DB_NAME") dConf := dbConfig{} @@ -51,7 +62,71 @@ func getDbConfig(cmd *cobra.Command) *dbConfig { } return &dConf } -func getTargetDbConfig() *targetDbConfig { +func initBackupConfig(cmd *cobra.Command) *BackupConfig { + 
utils.SetEnv("STORAGE_PATH", storagePath) + utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION") + utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION") + + //Get flag value and set env + remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH") + storage = utils.GetEnv(cmd, "storage", "STORAGE") + backupRetention, _ := cmd.Flags().GetInt("keep-last") + prune, _ := cmd.Flags().GetBool("prune") + disableCompression, _ = cmd.Flags().GetBool("disable-compression") + executionMode, _ = cmd.Flags().GetString("mode") + gpqPassphrase := os.Getenv("GPG_PASSPHRASE") + _ = utils.GetEnv(cmd, "path", "AWS_S3_PATH") + cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION") + + if gpqPassphrase != "" { + encryption = true + } + + //Initialize backup configs + config := BackupConfig{} + config.backupRetention = backupRetention + config.disableCompression = disableCompression + config.prune = prune + config.storage = storage + config.encryption = encryption + config.remotePath = remotePath + config.gpqPassphrase = gpqPassphrase + config.cronExpression = cronExpression + return &config +} + +type RestoreConfig struct { + s3Path string + remotePath string + storage string + file string + bucket string + gpqPassphrase string +} + +func initRestoreConfig(cmd *cobra.Command) *RestoreConfig { + utils.SetEnv("STORAGE_PATH", storagePath) + + //Get flag value and set env + s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH") + remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH") + storage = utils.GetEnv(cmd, "storage", "STORAGE") + file = utils.GetEnv(cmd, "file", "FILE_NAME") + _, _ = cmd.Flags().GetString("mode") + bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME") + gpqPassphrase := os.Getenv("GPG_PASSPHRASE") + //Initialize restore configs + rConfig := RestoreConfig{} + rConfig.s3Path = s3Path + rConfig.remotePath = remotePath + rConfig.storage = storage + rConfig.bucket = bucket + rConfig.file = file + rConfig.storage = storage + rConfig.gpqPassphrase = gpqPassphrase + return &rConfig +} +func initTargetDbConfig() *targetDbConfig { tdbConfig := targetDbConfig{} tdbConfig.targetDbHost = os.Getenv("TARGET_DB_HOST") tdbConfig.targetDbPort = os.Getenv("TARGET_DB_PORT") diff --git a/pkg/migrate.go b/pkg/migrate.go index cb3444d..17908f4 100644 --- a/pkg/migrate.go +++ b/pkg/migrate.go @@ -17,8 +17,8 @@ func StartMigration(cmd *cobra.Command) { intro() utils.Info("Starting database migration...") //Get DB config - dbConf = getDbConfig(cmd) - targetDbConf = getTargetDbConfig() + dbConf = initDbConfig(cmd) + targetDbConf = initTargetDbConfig() //Defining the target database variables newDbConfig := dbConfig{} diff --git a/pkg/restore.go b/pkg/restore.go index 771925b..ca13d6a 100644 --- a/pkg/restore.go +++ b/pkg/restore.go @@ -17,34 +17,24 @@ import ( func StartRestore(cmd *cobra.Command) { intro() - //Set env - utils.SetEnv("STORAGE_PATH", storagePath) + dbConf = initDbConfig(cmd) + restoreConf := initRestoreConfig(cmd) - //Get flag value and set env - s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH") - remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH") - storage = utils.GetEnv(cmd, "storage", "STORAGE") - file = utils.GetEnv(cmd, "file", "FILE_NAME") - executionMode, _ = cmd.Flags().GetString("mode") - bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME") - - dbConf = getDbConfig(cmd) - - switch storage { + switch restoreConf.storage { case "s3": - restoreFromS3(dbConf, file, bucket, s3Path) + restoreFromS3(dbConf, restoreConf.file, restoreConf.bucket, 
restoreConf.s3Path) case "local": utils.Info("Restore database from local") - copyToTmp(storagePath, file) - RestoreDatabase(dbConf, file) + copyToTmp(storagePath, restoreConf.file) + RestoreDatabase(dbConf, restoreConf.file) case "ssh": - restoreFromRemote(dbConf, file, remotePath) + restoreFromRemote(dbConf, restoreConf.file, restoreConf.remotePath) case "ftp": utils.Fatal("Restore from FTP is not yet supported") default: utils.Info("Restore database from local") - copyToTmp(storagePath, file) - RestoreDatabase(dbConf, file) + copyToTmp(storagePath, restoreConf.file) + RestoreDatabase(dbConf, restoreConf.file) } } diff --git a/utils/utils.go b/utils/utils.go index af116a5..35acb5c 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -10,6 +10,7 @@ import ( "bytes" "encoding/json" "fmt" + "github.com/robfig/cron/v3" "github.com/spf13/cobra" "io" "io/fs" @@ -248,3 +249,7 @@ func getTgUrl() string { return fmt.Sprintf("https://api.telegram.org/bot%s", os.Getenv("TG_TOKEN")) } +func IsValidCronExpression(cronExpr string) bool { + _, err := cron.ParseStandard(cronExpr) + return err == nil +} From 39890e71a988e6f82ec218908452020943fe4f16 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sat, 28 Sep 2024 09:22:28 +0200 Subject: [PATCH 02/13] chore: Add notification emoji for failed and success message --- pkg/scripts.go | 73 -------------------------------------------------- utils/utils.go | 4 +-- 2 files changed, 2 insertions(+), 75 deletions(-) delete mode 100644 pkg/scripts.go diff --git a/pkg/scripts.go b/pkg/scripts.go deleted file mode 100644 index 12dec23..0000000 --- a/pkg/scripts.go +++ /dev/null @@ -1,73 +0,0 @@ -package pkg - -// Package pkg / -/***** -@author Jonas Kaninda -@license MIT License -@Copyright © 2024 Jonas Kaninda -**/ -import ( - "fmt" - "github.com/jkaninda/pg-bkup/utils" - "os" - "os/exec" -) - -func CreateCrontabScript(disableCompression bool, storage string) { - //task := "/usr/local/bin/backup_cron.sh" - touchCmd := exec.Command("touch", backupCronFile) - if err := touchCmd.Run(); err != nil { - utils.Fatal("Error creating file %s: %v\n", backupCronFile, err) - } - var disableC = "" - if disableCompression { - disableC = "--disable-compression" - } - - var scriptContent string - - scriptContent = fmt.Sprintf(`#!/usr/bin/env bash -set -e -/usr/local/bin/pg-bkup backup --dbname %s --storage %s %v -`, os.Getenv("DB_NAME"), storage, disableC) - - if err := utils.WriteToFile(backupCronFile, scriptContent); err != nil { - utils.Fatal("Error writing to %s: %v\n", backupCronFile, err) - } - - chmodCmd := exec.Command("chmod", "+x", "/usr/local/bin/backup_cron.sh") - if err := chmodCmd.Run(); err != nil { - utils.Fatal("Error changing permissions of %s: %v\n", backupCronFile, err) - } - - lnCmd := exec.Command("ln", "-s", "/usr/local/bin/backup_cron.sh", "/usr/local/bin/backup_cron") - if err := lnCmd.Run(); err != nil { - utils.Fatal("Error creating symbolic link: %v\n", err) - - } - - touchLogCmd := exec.Command("touch", cronLogFile) - if err := touchLogCmd.Run(); err != nil { - utils.Fatal("Error creating file %s: %v\n", cronLogFile, err) - } - - cronJob := "/etc/cron.d/backup_cron" - touchCronCmd := exec.Command("touch", cronJob) - if err := touchCronCmd.Run(); err != nil { - utils.Fatal("Error creating file %s: %v\n", cronJob, err) - } - - cronContent := fmt.Sprintf(`%s root exec /bin/bash -c ". 
/run/supervisord.env; /usr/local/bin/backup_cron.sh >> %s" -`, os.Getenv("BACKUP_CRON_EXPRESSION"), cronLogFile) - - if err := utils.WriteToFile(cronJob, cronContent); err != nil { - utils.Fatal("Error writing to %s: %v\n", cronJob, err) - } - utils.ChangePermission("/etc/cron.d/backup_cron", 0644) - - crontabCmd := exec.Command("crontab", "/etc/cron.d/backup_cron") - if err := crontabCmd.Run(); err != nil { - utils.Fatal("Error updating crontab: ", err) - } - utils.Info("Backup job created.") -} diff --git a/utils/utils.go b/utils/utils.go index 35acb5c..8ecb140 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -223,7 +223,7 @@ func NotifySuccess(fileName string) { //Telegram notification err := CheckEnvVars(vars) if err == nil { - message := "PostgreSQL Backup \n" + + message := "[✅ PostgreSQL Backup ]\n" + "Database has been backed up \n" + "Backup name is " + fileName sendMessage(message) @@ -238,7 +238,7 @@ func NotifyError(error string) { //Telegram notification err := CheckEnvVars(vars) if err == nil { - message := "PostgreSQL Backup \n" + + message := "[🔴 PostgreSQL Backup ]\n" + "An error occurred during database backup \n" + "Error: " + error sendMessage(message) From 91f4a462264d75467e3be34892fb0a18aee1400f Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sat, 28 Sep 2024 09:48:37 +0200 Subject: [PATCH 03/13] chore: define gpg home directory, update scheduled deployment doc --- docs/how-tos/backup-to-s3.md | 3 +- docs/how-tos/backup-to-ssh.md | 7 +-- docs/how-tos/backup.md | 5 +- docs/how-tos/encrypt-backup.md | 3 ++ docs/reference/index.md | 83 ++++++++++++++++++++-------------- pkg/config.go | 2 +- pkg/encrypt.go | 4 +- pkg/var.go | 10 ++-- 8 files changed, 67 insertions(+), 50 deletions(-) diff --git a/docs/how-tos/backup-to-s3.md b/docs/how-tos/backup-to-s3.md index a3d78da..5d09b48 100644 --- a/docs/how-tos/backup-to-s3.md +++ b/docs/how-tos/backup-to-s3.md @@ -48,7 +48,7 @@ networks: ### Recurring backups to S3 As explained above, you need just to add AWS environment variables and specify the storage type `--storage s3`. -In case you need to use recurring backups, you can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below. +In case you need to use recurring backups, you can use `--cron-expression "0 1 * * *"` flag or `BACKUP_CRON_EXPRESSION=0 1 * * *` as described below. ```yml services: @@ -72,6 +72,7 @@ services: - AWS_REGION="us-west-2" - AWS_ACCESS_KEY=xxxx - AWS_SECRET_KEY=xxxxx + # - BACKUP_CRON_EXPRESSION=0 1 * * * # Optional ## In case you are using S3 alternative such as Minio and your Minio instance is not secured, you change it to true - AWS_DISABLE_SSL="false" # pg-bkup container must be connected to the same network with your database diff --git a/docs/how-tos/backup-to-ssh.md b/docs/how-tos/backup-to-ssh.md index f68f294..1d5a9e6 100644 --- a/docs/how-tos/backup-to-ssh.md +++ b/docs/how-tos/backup-to-ssh.md @@ -52,7 +52,7 @@ networks: ### Recurring backups to SSH remote server As explained above, you need just to add required environment variables and specify the storage type `--storage ssh`. -You can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below. +You can use `--cron-expression "* * * * *"` or `BACKUP_CRON_EXPRESSION=0 1 * * *` as described below. ```yml services: @@ -63,10 +63,7 @@ services: # for a list of available releases. 
image: jkaninda/pg-bkup container_name: pg-bkup - command: - - /bin/sh - - -c - - pg-bkup backup -d database --storage ssh --mode scheduled --period "0 1 * * *" + command: backup -d database --storage ssh --cron-expression "0 1 * * *" volumes: - ./id_ed25519:/tmp/id_ed25519" environment: diff --git a/docs/how-tos/backup.md b/docs/how-tos/backup.md index 703fe8a..fc52f33 100644 --- a/docs/how-tos/backup.md +++ b/docs/how-tos/backup.md @@ -56,7 +56,7 @@ networks: jkaninda/pg-bkup backup -d database_name ``` -In case you need to use recurring backups, you can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below. +In case you need to use recurring backups, you can use `--cron-expression "0 1 * * *"` flag or `BACKUP_CRON_EXPRESSION=0 1 * * *` as described below. ```yml services: @@ -67,7 +67,7 @@ services: # for a list of available releases. image: jkaninda/pg-bkup container_name: pg-bkup - #command: backup -d database --mode scheduled --period "0 1 * * *" + command: backup -d database --cron-expression "0 1 * * *" volumes: - ./backup:/backup environment: @@ -76,6 +76,7 @@ services: - DB_NAME=database - DB_USERNAME=username - DB_PASSWORD=password + - BACKUP_CRON_EXPRESSION=0 1 * * * # pg-bkup container must be connected to the same network with your database networks: - web diff --git a/docs/how-tos/encrypt-backup.md b/docs/how-tos/encrypt-backup.md index e564c49..fbe976e 100644 --- a/docs/how-tos/encrypt-backup.md +++ b/docs/how-tos/encrypt-backup.md @@ -11,6 +11,9 @@ The image supports encrypting backups using GPG out of the box. In case a `GPG_P {: .warning } To restore an encrypted backup, you need to provide the same GPG passphrase used during backup process. +- GPG home directory `/config/gnupg` +- Cipher algorithm `aes256` +- To decrypt manually, you need to install `gnupg` ```shell diff --git a/docs/reference/index.md b/docs/reference/index.md index 55ba6a4..0f996f4 100644 --- a/docs/reference/index.md +++ b/docs/reference/index.md @@ -25,51 +25,50 @@ Backup, restore and migrate targets, schedule and retention are configured using | --path | | AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup` | | --dbname | -d | Database name | | --port | -p | Database port (default: 5432) | -| --mode | -m | Execution mode. default or scheduled (default: default) | | --disable-compression | | Disable database backup compression | | --prune | | Delete old backup, default disabled | | --keep-last | | Delete old backup created more than specified days ago, default 7 days | -| --period | | Crontab period for scheduled mode only. 
(default: "0 1 * * *") | +| --cron-expression | | Backup cron expression, eg: (* * * * *) or @daily | | --help | -h | Print this help message and exit | | --version | -V | Print version information and exit | ## Environment variables -| Name | Requirement | Description | -|------------------------|----------------------------------------------------|------------------------------------------------------| -| DB_PORT | Optional, default 5432 | Database port number | -| DB_HOST | Required | Database host | -| DB_NAME | Optional if it was provided from the -d flag | Database name | -| DB_USERNAME | Required | Database user name | -| DB_PASSWORD | Required | Database password | -| AWS_ACCESS_KEY | Optional, required for S3 storage | AWS S3 Access Key | -| AWS_SECRET_KEY | Optional, required for S3 storage | AWS S3 Secret Key | -| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name | -| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name | -| AWS_REGION | Optional, required for S3 storage | AWS Region | -| AWS_DISABLE_SSL | Optional, required for S3 storage | Disable SSL | -| FILE_NAME | Optional if it was provided from the --file flag | Database file to restore (extensions: .sql, .sql.gz) | -| GPG_PASSPHRASE | Optional, required to encrypt and restore backup | GPG passphrase | -| BACKUP_CRON_EXPRESSION | Optional if it was provided from the --period flag | Backup cron expression for docker in scheduled mode | -| SSH_HOST_NAME | Optional, required for SSH storage | ssh remote hostname or ip | -| SSH_USER | Optional, required for SSH storage | ssh remote user | -| SSH_PASSWORD | Optional, required for SSH storage | ssh remote user's password | -| SSH_IDENTIFY_FILE | Optional, required for SSH storage | ssh remote user's private key | -| SSH_PORT | Optional, required for SSH storage | ssh remote server port | -| SSH_REMOTE_PATH | Optional, required for SSH storage | ssh remote path (/home/toto/backup) | -| TARGET_DB_HOST | Optional, required for database migration | Target database host | -| TARGET_DB_PORT | Optional, required for database migration | Target database port | -| TARGET_DB_NAME | Optional, required for database migration | Target database name | -| TARGET_DB_USERNAME | Optional, required for database migration | Target database username | -| TARGET_DB_PASSWORD | Optional, required for database migration | Target database password | -| TG_TOKEN | Optional, required for Telegram notification | Telegram token | -| TG_CHAT_ID | Optional, required for Telegram notification | Telegram Chat ID | +| Name | Requirement | Description | +|------------------------|---------------------------------------------------------------|------------------------------------------------------| +| DB_PORT | Optional, default 5432 | Database port number | +| DB_HOST | Required | Database host | +| DB_NAME | Optional if it was provided from the -d flag | Database name | +| DB_USERNAME | Required | Database user name | +| DB_PASSWORD | Required | Database password | +| AWS_ACCESS_KEY | Optional, required for S3 storage | AWS S3 Access Key | +| AWS_SECRET_KEY | Optional, required for S3 storage | AWS S3 Secret Key | +| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name | +| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name | +| AWS_REGION | Optional, required for S3 storage | AWS Region | +| AWS_DISABLE_SSL | Optional, required for S3 storage | Disable SSL | +| FILE_NAME | Optional if it was provided from the --file flag | 
Database file to restore (extensions: .sql, .sql.gz) | +| GPG_PASSPHRASE | Optional, required to encrypt and restore backup | GPG passphrase | +| BACKUP_CRON_EXPRESSION | Optional if it was provided from the `--cron-expression` flag | Backup cron expression for docker in scheduled mode | +| SSH_HOST_NAME | Optional, required for SSH storage | ssh remote hostname or ip | +| SSH_USER | Optional, required for SSH storage | ssh remote user | +| SSH_PASSWORD | Optional, required for SSH storage | ssh remote user's password | +| SSH_IDENTIFY_FILE | Optional, required for SSH storage | ssh remote user's private key | +| SSH_PORT | Optional, required for SSH storage | ssh remote server port | +| SSH_REMOTE_PATH | Optional, required for SSH storage | ssh remote path (/home/toto/backup) | +| TARGET_DB_HOST | Optional, required for database migration | Target database host | +| TARGET_DB_PORT | Optional, required for database migration | Target database port | +| TARGET_DB_NAME | Optional, required for database migration | Target database name | +| TARGET_DB_USERNAME | Optional, required for database migration | Target database username | +| TARGET_DB_PASSWORD | Optional, required for database migration | Target database password | +| TG_TOKEN | Optional, required for Telegram notification | Telegram token (`BOT-ID:BOT-TOKEN`) | +| TG_CHAT_ID | Optional, required for Telegram notification | Telegram Chat ID | --- ## Run in Scheduled mode This image can be run as CronJob in Kubernetes for a regular backup which makes deployment on Kubernetes easy as Kubernetes has CronJob resources. -For Docker, you need to run it in scheduled mode by adding `--mode scheduled` flag and specify the periodical backup time by adding `--period "0 1 * * *"` flag. +For Docker, you need to run it in scheduled mode by adding `--cron-expression "* * * * *"` flag or by defining `BACKUP_CRON_EXPRESSION=0 1 * * *` environment variable. ## Syntax of crontab (field description) @@ -111,4 +110,22 @@ Easy to remember format: ```conf 0 1 * * * -``` \ No newline at end of file +``` +## Predefined schedules +You may use one of several pre-defined schedules in place of a cron expression. + +| Entry | Description | Equivalent To | +|------------------------|--------------------------------------------|---------------| +| @yearly (or @annually) | Run once a year, midnight, Jan. 1st | 0 0 1 1 * | +| @monthly | Run once a month, midnight, first of month | 0 0 1 * * | +| @weekly | Run once a week, midnight between Sat/Sun | 0 0 * * 0 | +| @daily (or @midnight) | Run once a day, midnight | 0 0 * * * | +| @hourly | Run once an hour, beginning of hour | 0 * * * * | + +### Intervals +You may also schedule a job to execute at fixed intervals, starting at the time it's added or cron is run. This is supported by formatting the cron spec like this: + +@every +where "duration" is a string accepted by time. + +For example, "@every 1h30m10s" would indicate a schedule that activates after 1 hour, 30 minutes, 10 seconds, and then every interval after that. 
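
Since scheduling is handled in-process by `github.com/robfig/cron/v3`, any expression — a five-field spec, a predefined schedule, or an `@every` interval — can be checked up front with the same parse call that `utils.IsValidCronExpression` uses. A minimal sketch:

```go
package main

import (
	"fmt"
	"time"

	"github.com/robfig/cron/v3"
)

func main() {
	for _, expr := range []string{"0 1 * * *", "@daily", "@every 1h30m10s"} {
		// ParseStandard accepts five-field specs, descriptors such as @daily,
		// and @every intervals whose duration must satisfy time.ParseDuration.
		sched, err := cron.ParseStandard(expr)
		if err != nil {
			fmt.Printf("%-16s invalid: %v\n", expr, err)
			continue
		}
		fmt.Printf("%-16s next run: %s\n", expr, sched.Next(time.Now()).Format(time.RFC3339))
	}
}
```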
\ No newline at end of file diff --git a/pkg/config.go b/pkg/config.go index f269509..603880e 100644 --- a/pkg/config.go +++ b/pkg/config.go @@ -73,7 +73,7 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig { backupRetention, _ := cmd.Flags().GetInt("keep-last") prune, _ := cmd.Flags().GetBool("prune") disableCompression, _ = cmd.Flags().GetBool("disable-compression") - executionMode, _ = cmd.Flags().GetString("mode") + _, _ = cmd.Flags().GetString("mode") gpqPassphrase := os.Getenv("GPG_PASSPHRASE") _ = utils.GetEnv(cmd, "path", "AWS_S3_PATH") cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION") diff --git a/pkg/encrypt.go b/pkg/encrypt.go index bd85a32..1211463 100644 --- a/pkg/encrypt.go +++ b/pkg/encrypt.go @@ -16,7 +16,7 @@ import ( func Decrypt(inputFile string, passphrase string) error { utils.Info("Decrypting backup file: %s...", inputFile) //Create gpg home dir - err := utils.MakeDir(gpgHome) + err := utils.MakeDirAll(gpgHome) if err != nil { return err } @@ -37,7 +37,7 @@ func Decrypt(inputFile string, passphrase string) error { func Encrypt(inputFile string, passphrase string) error { utils.Info("Encrypting backup...") //Create gpg home dir - err := utils.MakeDir(gpgHome) + err := utils.MakeDirAll(gpgHome) if err != nil { return err } diff --git a/pkg/var.go b/pkg/var.go index 8919960..52dcf2e 100644 --- a/pkg/var.go +++ b/pkg/var.go @@ -6,17 +6,15 @@ **/ package pkg -const cronLogFile = "/var/log/pg-bkup.log" const tmpPath = "/tmp/backup" -const backupCronFile = "/usr/local/bin/backup_cron.sh" -const gpgHome = "gnupg" +const gpgHome = "/config/gnupg" const algorithm = "aes256" const gpgExtension = "gpg" var ( - storage = "local" - file = "" - executionMode = "default" + storage = "local" + file = "" + storagePath = "/backup" disableCompression = false encryption = false From ddc76a6a1ba37cd79f26458885b72a333e4f8ba1 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sat, 28 Sep 2024 09:54:08 +0200 Subject: [PATCH 04/13] chore: update help example --- utils/constant.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/utils/constant.go b/utils/constant.go index d92e257..1bf1437 100644 --- a/utils/constant.go +++ b/utils/constant.go @@ -7,10 +7,10 @@ package utils const RestoreExample = "pg-bkup restore --dbname database --file db_20231219_022941.sql.gz\n" + - "bkup restore --dbname database --storage s3 --path /custom-path --file db_20231219_022941.sql.gz" + "restore --dbname database --storage s3 --path /custom-path --file db_20231219_022941.sql.gz" const BackupExample = "pg-bkup backup --dbname database --disable-compression\n" + - "pg-bkup backup --dbname database --storage s3 --path /custom-path --disable-compression" + "backup --dbname database --storage s3 --path /custom-path --disable-compression" const MainExample = "pg-bkup backup --dbname database --disable-compression\n" + - "pg-bkup backup --dbname database --storage s3 --path /custom-path\n" + - "pg-bkup restore --dbname database --file db_20231219_022941.sql.gz" + "backup --dbname database --storage s3 --path /custom-path\n" + + "restore --dbname database --file db_20231219_022941.sql.gz" From 4a1b8106b940d28a6b7c1c1344bbbf9dc1b57da9 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sat, 28 Sep 2024 21:28:27 +0200 Subject: [PATCH 05/13] docs: update deployment example --- docs/how-tos/deploy-on-kubernetes.md | 1 - docs/reference/index.md | 2 +- examples/docker-compose.scheduled.local.yaml | 5 +++-- examples/docker-compose.scheduled.s3.yaml | 3 ++- 4 files changed, 6 insertions(+), 5 
deletions(-) diff --git a/docs/how-tos/deploy-on-kubernetes.md b/docs/how-tos/deploy-on-kubernetes.md index 3bf5be0..2e6a1e4 100644 --- a/docs/how-tos/deploy-on-kubernetes.md +++ b/docs/how-tos/deploy-on-kubernetes.md @@ -193,7 +193,6 @@ spec: command: - /bin/sh - -c - - bkup - backup --storage ssh --disable-compression resources: limits: diff --git a/docs/reference/index.md b/docs/reference/index.md index 0f996f4..98a07f2 100644 --- a/docs/reference/index.md +++ b/docs/reference/index.md @@ -123,7 +123,7 @@ You may use one of several pre-defined schedules in place of a cron expression. | @hourly | Run once an hour, beginning of hour | 0 * * * * | ### Intervals -You may also schedule a job to execute at fixed intervals, starting at the time it's added or cron is run. This is supported by formatting the cron spec like this: +You may also schedule backup task at fixed intervals, starting at the time it's added or cron is run. This is supported by formatting the cron spec like this: @every where "duration" is a string accepted by time. diff --git a/examples/docker-compose.scheduled.local.yaml b/examples/docker-compose.scheduled.local.yaml index 8e2ccf4..b41071e 100644 --- a/examples/docker-compose.scheduled.local.yaml +++ b/examples/docker-compose.scheduled.local.yaml @@ -3,11 +3,12 @@ services: pg-bkup: image: jkaninda/pg-bkup container_name: pg-bkup - command: backup --dbname database_name --mode scheduled --period "0 1 * * *" + command: backup --dbname database_name volumes: - ./backup:/backup environment: - DB_PORT=5432 - DB_HOST=postgress - DB_USERNAME=userName - - DB_PASSWORD=${DB_PASSWORD} \ No newline at end of file + - DB_PASSWORD=${DB_PASSWORD} + - BACKUP_CRON_EXPRESSION=0 1 * * * \ No newline at end of file diff --git a/examples/docker-compose.scheduled.s3.yaml b/examples/docker-compose.scheduled.s3.yaml index 6e4e049..3727e57 100644 --- a/examples/docker-compose.scheduled.s3.yaml +++ b/examples/docker-compose.scheduled.s3.yaml @@ -6,7 +6,7 @@ services: # for a list of available releases. 
image: jkaninda/pg-bkup container_name: pg-bkup - command: backup --storage s3 -d my-database --mode scheduled --period "0 1 * * *" + command: backup --storage s3 -d my-database environment: - DB_PORT=5432 - DB_HOST=postgres @@ -21,6 +21,7 @@ services: - AWS_SECRET_KEY=xxxxx ## In case you are using S3 alternative such as Minio and your Minio instance is not secured, you change it to true - AWS_DISABLE_SSL="false" + - BACKUP_CRON_EXPRESSION=0 1 * * * # pg-bkup container must be connected to the same network with your database networks: - web From a90e5c673c4d81bbeab25873875df2890612f2d7 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sun, 29 Sep 2024 06:19:16 +0200 Subject: [PATCH 06/13] Add favicon --- docs/favicon.ico | Bin 0 -> 4158 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 docs/favicon.ico diff --git a/docs/favicon.ico b/docs/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..35aa34d7137f2c4e9cb7adc8ae486f2990503339 GIT binary patch literal 4158 zcmc&%X-`yV6rTQq_EW!V(l37MZ)j3?alr-m1q8%e7p&si3TPtkBDS@FNZqATBPNP< z1x0a1aSO8p!#Xg)3~OU@`aEa2mpcr`CTa?~bLZaoeb4?p=Zwwv1^!2jwBd84ZPZsb z+t)UmZ4|B%xW2>1_xwx$(q{YS+wi3?@L``gK|c2KEKQKQjT@w9#R_rGm@d`h$9=TS z584znNt$zVq`R_Gx*k51o|iAB`^jTz+`U^IlVd)ge)#6wyjl8OE~#6;P8_k*#JzZt zG#}29{@NPc4~u=m_|K-lv!p~iu3j0`0bSK&$4Fz=PU(H~MqKme4z7KI+!}eHpFLGC zq&YiVs>h8T^bh)7ad8^|y0kRp-m>;7^i%%qIO%gZ)W-+xghe)<%CH%QH@m0{Ud zdC=ueNzwDQscV#TJn>7!F>Pv?cZRbAXX@6ilkVrwrGDElY6JO#_V>PhE1~=MrRBF@ z#l2{u1d57ak4Id{dz4R*?Z*2)rxW#FrMyEwLU-><)84(x1rcYk-#I;2f@P%|TjG?l ztIy1op4YFW0e9b~O%f_EM-82n+SF9_i}fBMhyGI*^2PX&YrVrA($EPfik+ESBgI`PdTeGD@*LGBdb2^cjV8r zWU=~5-t{EJhj|zHV9w05tc_*K@a>Vf7tD{0A#}GK`CU4%Uspb<9y3~6Pn{Hh!O`d% zv+Bk<=H^`>pta&JJT_SGSTk%^Jmwsd_KO#z^fI4a6%|sm67`Jy6E8KZR-t|>#5H$L zynXGst7HGHO zgLy};$!q@nJn-s13H((gZD-F&+nF<_4BZR=8Gpu`7&PSTn(9so>ju{tYt+|TSSbFZ z`O&%=mz6)N&+KE(hq9sbG;+F3`#fvFD#y2Fi`FmwF>Po347(6tYaM8g(5LB(a+VS6 zfiry1deGW9emsl^*hG$He>Y;qJMAa$4=0DVGv98jy}*9Ukt5Q2Vo0xmt*$we1gTSCL=HZ|Wm*!x^Aq#}3mT(|5`OKdElw0=7G*PSLZ0bLTKG9f~}| zc!Og@_wI%Hle)ntj63aMZIa*E8z{>eH%oi1@oeg~#Js1fN@~}pY7Sa*|4=-H?w2cP z8S&F|kjH(YyJh10d8;&R&kV;0STp9wc5s?8Z+MfE&^I0qKi@Flz5|qj z=K^xC+2@RWGfu!H zv|gAejWg_u&}sNSpr0`|dQ*LRx_QQvkf8mUvj^vT_6dHE@*8p#2l@HZ1|E&VfmPp- z?>xtON^=7rRW@@7J)8|Z@$qU~sQjMJ8S2jv8Pum-@xGcEx*yM!LyUD-KGPY;v>CFz zNr~Feb$-TkOJ1&WuJ)GUVu`r77Z;-^q91U5xIMH1?^SOKxE#K*p3S{H13b5#J{9g8 z_|7%OFgVp;P#{js-8>(00h{atJ+EKuZy;h^*I~Dje?D_&Z``>PedDQOT6;V8aE+K? z{|ll1-HQi#mFK;%mA#$V@FNbyCVK_@dF!8r!hXbgoV`C-S_-V}m%6m|TDQb9`v&_v zXRP}4ZE6Sg(Pr*tJ$GI!3FCt{TXThH);jr(m Date: Sun, 29 Sep 2024 06:52:38 +0200 Subject: [PATCH 07/13] docs: add docker recurring backup examples --- README.md | 12 ++++++++++++ docs/index.md | 12 ++++++++++++ examples/docker-compose.scheduled.local.yaml | 8 +++++--- examples/docker-compose.scheduled.s3.yaml | 3 ++- 4 files changed, 31 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index fbca0ac..632944e 100644 --- a/README.md +++ b/README.md @@ -87,6 +87,18 @@ services: networks: web: ``` +### Docker recurring backup + +```shell + docker run --rm --network network_name \ + -v $PWD/backup:/backup/ \ + -e "DB_HOST=hostname" \ + -e "DB_USERNAME=user" \ + -e "DB_PASSWORD=password" \ + jkaninda/pg-bkup backup -d dbName --cron-expression "@every 1m" +``` +See: https://jkaninda.github.io/pg-bkup/reference/#predefined-schedules + ## Deploy on Kubernetes For Kubernetes, you don't need to run it in scheduled mode. You can deploy it as Job or CronJob. 
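
In scheduled mode the container stays in the foreground and fires backups from an in-process scheduler rather than a system cron daemon. Stripped to its essentials, the loop added in `pkg/backup.go` works like the following sketch (the backup task is stubbed here):

```go
package main

import (
	"log"

	"github.com/robfig/cron/v3"
)

func main() {
	expr := "@every 1m" // from --cron-expression or BACKUP_CRON_EXPRESSION

	c := cron.New()
	if _, err := c.AddFunc(expr, func() {
		// In pg-bkup this is BackupTask(db, config).
		log.Println("Starting backup task...")
	}); err != nil {
		log.Fatalf("invalid cron expression %q: %v", expr, err)
	}
	c.Start()
	defer c.Stop()
	select {} // block forever so the container keeps running between jobs
}
```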
diff --git a/docs/index.md b/docs/index.md index a4c8620..9ddeb80 100644 --- a/docs/index.md +++ b/docs/index.md @@ -82,6 +82,18 @@ services: networks: web: ``` +### Docker recurring backup + +```shell + docker run --rm --network network_name \ + -v $PWD/backup:/backup/ \ + -e "DB_HOST=hostname" \ + -e "DB_USERNAME=user" \ + -e "DB_PASSWORD=password" \ + jkaninda/pg-bkup backup -d dbName --cron-expression "@every 1m" +``` +See: https://jkaninda.github.io/pg-bkup/reference/#predefined-schedules + ## Kubernetes ```yaml diff --git a/examples/docker-compose.scheduled.local.yaml b/examples/docker-compose.scheduled.local.yaml index b41071e..9b5b78f 100644 --- a/examples/docker-compose.scheduled.local.yaml +++ b/examples/docker-compose.scheduled.local.yaml @@ -3,12 +3,14 @@ services: pg-bkup: image: jkaninda/pg-bkup container_name: pg-bkup - command: backup --dbname database_name + command: backup --dbname database_name #--cron-expression "@every 5m" volumes: - ./backup:/backup environment: - DB_PORT=5432 - - DB_HOST=postgress + - DB_HOST=postgres - DB_USERNAME=userName - DB_PASSWORD=${DB_PASSWORD} - - BACKUP_CRON_EXPRESSION=0 1 * * * \ No newline at end of file + # Check https://jkaninda.github.io/pg-bkup/reference/#predefined-schedules + - BACKUP_CRON_EXPRESSION=@daily #@every 5m|@weekly | @monthly |0 1 * * * + diff --git a/examples/docker-compose.scheduled.s3.yaml b/examples/docker-compose.scheduled.s3.yaml index 3727e57..ea1ef2b 100644 --- a/examples/docker-compose.scheduled.s3.yaml +++ b/examples/docker-compose.scheduled.s3.yaml @@ -21,7 +21,8 @@ services: - AWS_SECRET_KEY=xxxxx ## In case you are using S3 alternative such as Minio and your Minio instance is not secured, you change it to true - AWS_DISABLE_SSL="false" - - BACKUP_CRON_EXPRESSION=0 1 * * * + # Check https://jkaninda.github.io/pg-bkup/reference/#predefined-schedules + - BACKUP_CRON_EXPRESSION=@daily #@every 5m|@weekly | @monthly |0 1 * * * # pg-bkup container must be connected to the same network with your database networks: - web From e388d0ca144d716ddd6a96fc81e414d1886260ed Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sun, 29 Sep 2024 07:31:52 +0200 Subject: [PATCH 08/13] chore: add test configurations before running in scheduled mode --- pkg/backup.go | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/pkg/backup.go b/pkg/backup.go index b243fa5..105cee4 100644 --- a/pkg/backup.go +++ b/pkg/backup.go @@ -44,8 +44,11 @@ func scheduledMode(db *dbConfig, config *BackupConfig) { //Test database connexion testDatabaseConnection(db) - - utils.Info("Creating cron job...") + //Test backup + utils.Info("Testing backup configurations...") + BackupTask(db, config) + utils.Info("Testing backup configurations...done") + utils.Info("Creating backup job...") // Create a new cron instance c := cron.New() @@ -57,7 +60,8 @@ func scheduledMode(db *dbConfig, config *BackupConfig) { } // Start the cron scheduler c.Start() - utils.Info("Creating cron job...done") + utils.Info("Creating backup job...done") + utils.Info("Backup job started") defer c.Stop() select {} } @@ -84,7 +88,7 @@ func BackupTask(db *dbConfig, config *BackupConfig) { } func intro() { utils.Info("Starting PostgreSQL Backup...") - utils.Info("Copyright © 2024 Jonas Kaninda ") + utils.Info("Copyright (c) 2024 Jonas Kaninda ") } // BackupDatabase backup database From b151489324aa7baf570f2de3c9db63a541db3f16 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sun, 29 Sep 2024 20:00:30 +0200 Subject: [PATCH 09/13] refactor: replace function params by 
config struct --- pkg/backup.go | 65 +++++++++++++++++++++++++-------------------------- pkg/config.go | 8 +++---- 2 files changed, 36 insertions(+), 37 deletions(-) diff --git a/pkg/backup.go b/pkg/backup.go index 105cee4..6243e5a 100644 --- a/pkg/backup.go +++ b/pkg/backup.go @@ -74,16 +74,16 @@ func BackupTask(db *dbConfig, config *BackupConfig) { } config.backupFileName = backupFileName switch config.storage { - case "s3": - s3Backup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption) case "local": - localBackup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption) + localBackup(db, config) + case "s3": + s3Backup(db, config) case "ssh", "remote": - sshBackup(db, config.backupFileName, config.remotePath, config.disableCompression, config.prune, config.backupRetention, config.encryption) + sshBackup(db, config) case "ftp": utils.Fatal("Not supported storage type: %s", config.storage) default: - localBackup(db, config.backupFileName, config.disableCompression, config.prune, config.backupRetention, config.encryption) + localBackup(db, config) } } func intro() { @@ -162,36 +162,36 @@ func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool utils.Info("Database has been backed up") } -func localBackup(db *dbConfig, backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) { +func localBackup(db *dbConfig, config *BackupConfig) { utils.Info("Backup database to local storage") - BackupDatabase(db, backupFileName, disableCompression) - finalFileName := backupFileName - if encrypt { - encryptBackup(backupFileName) - finalFileName = fmt.Sprintf("%s.%s", backupFileName, gpgExtension) + BackupDatabase(db, config.backupFileName, disableCompression) + finalFileName := config.backupFileName + if config.encryption { + encryptBackup(config.backupFileName, config.passphrase) + finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, gpgExtension) } utils.Info("Backup name is %s", finalFileName) moveToBackup(finalFileName, storagePath) //Send notification utils.NotifySuccess(finalFileName) //Delete old backup - if prune { - deleteOldBackup(backupRetention) + if config.prune { + deleteOldBackup(config.backupRetention) } //Delete temp deleteTemp() } -func s3Backup(db *dbConfig, backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) { +func s3Backup(db *dbConfig, config *BackupConfig) { bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME") s3Path := utils.GetEnvVariable("AWS_S3_PATH", "S3_PATH") utils.Info("Backup database to s3 storage") //Backup database - BackupDatabase(db, backupFileName, disableCompression) - finalFileName := backupFileName - if encrypt { - encryptBackup(backupFileName) - finalFileName = fmt.Sprintf("%s.%s", backupFileName, "gpg") + BackupDatabase(db, config.backupFileName, disableCompression) + finalFileName := config.backupFileName + if config.encryption { + encryptBackup(config.backupFileName, config.passphrase) + finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg") } utils.Info("Uploading backup archive to remote storage S3 ... 
") @@ -203,14 +203,14 @@ func s3Backup(db *dbConfig, backupFileName string, disableCompression bool, prun } //Delete backup file from tmp folder - err = utils.DeleteFile(filepath.Join(tmpPath, backupFileName)) + err = utils.DeleteFile(filepath.Join(tmpPath, config.backupFileName)) if err != nil { fmt.Println("Error deleting file: ", err) } // Delete old backup - if prune { - err := utils.DeleteOldBackup(bucket, s3Path, backupRetention) + if config.prune { + err := utils.DeleteOldBackup(bucket, s3Path, config.backupRetention) if err != nil { utils.Fatal("Error deleting old backup from S3: %s ", err) } @@ -221,18 +221,18 @@ func s3Backup(db *dbConfig, backupFileName string, disableCompression bool, prun //Delete temp deleteTemp() } -func sshBackup(db *dbConfig, backupFileName, remotePath string, disableCompression bool, prune bool, backupRetention int, encrypt bool) { +func sshBackup(db *dbConfig, config *BackupConfig) { utils.Info("Backup database to Remote server") //Backup database - BackupDatabase(db, backupFileName, disableCompression) - finalFileName := backupFileName - if encrypt { - encryptBackup(backupFileName) - finalFileName = fmt.Sprintf("%s.%s", backupFileName, "gpg") + BackupDatabase(db, config.backupFileName, disableCompression) + finalFileName := config.backupFileName + if config.encryption { + encryptBackup(config.backupFileName, config.passphrase) + finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg") } utils.Info("Uploading backup archive to remote storage ... ") utils.Info("Backup name is %s", finalFileName) - err := CopyToRemote(finalFileName, remotePath) + err := CopyToRemote(finalFileName, config.remotePath) if err != nil { utils.Fatal("Error uploading file to the remote server: %s ", err) @@ -244,7 +244,7 @@ func sshBackup(db *dbConfig, backupFileName, remotePath string, disableCompressi utils.Error("Error deleting file: %v", err) } - if prune { + if config.prune { //TODO: Delete old backup from remote server utils.Info("Deleting old backup from a remote server is not implemented yet") @@ -257,9 +257,8 @@ func sshBackup(db *dbConfig, backupFileName, remotePath string, disableCompressi deleteTemp() } -func encryptBackup(backupFileName string) { - gpgPassphrase := os.Getenv("GPG_PASSPHRASE") - err := Encrypt(filepath.Join(tmpPath, backupFileName), gpgPassphrase) +func encryptBackup(backupFileName, gpqPassphrase string) { + err := Encrypt(filepath.Join(tmpPath, backupFileName), gpqPassphrase) if err != nil { utils.Fatal("Error during encrypting backup %v", err) } diff --git a/pkg/config.go b/pkg/config.go index 603880e..942008e 100644 --- a/pkg/config.go +++ b/pkg/config.go @@ -40,7 +40,7 @@ type BackupConfig struct { prune bool encryption bool remotePath string - gpqPassphrase string + passphrase string storage string cronExpression string } @@ -74,11 +74,11 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig { prune, _ := cmd.Flags().GetBool("prune") disableCompression, _ = cmd.Flags().GetBool("disable-compression") _, _ = cmd.Flags().GetString("mode") - gpqPassphrase := os.Getenv("GPG_PASSPHRASE") + passphrase := os.Getenv("GPG_PASSPHRASE") _ = utils.GetEnv(cmd, "path", "AWS_S3_PATH") cronExpression := os.Getenv("BACKUP_CRON_EXPRESSION") - if gpqPassphrase != "" { + if passphrase != "" { encryption = true } @@ -90,7 +90,7 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig { config.storage = storage config.encryption = encryption config.remotePath = remotePath - config.gpqPassphrase = gpqPassphrase + config.passphrase = passphrase 
config.cronExpression = cronExpression return &config } From ee25f2dcff7c0c7d5515923c48c8a01c5b63d41b Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sun, 29 Sep 2024 20:32:11 +0200 Subject: [PATCH 10/13] chore: migrate baseos from Ubuntu to Alpine --- docker/Dockerfile | 20 +++----------------- 1 file changed, 3 insertions(+), 17 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 4b62dcf..c67a6a2 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -9,7 +9,7 @@ RUN go mod download # Build RUN CGO_ENABLED=0 GOOS=linux go build -o /app/pg-bkup -FROM ubuntu:24.04 +FROM alpine:3.20.3 ENV DB_HOST="" ENV DB_NAME="" ENV DB_USERNAME="" @@ -35,7 +35,6 @@ ENV TARGET_DB_PORT=5432 ENV TARGET_DB_NAME="" ENV TARGET_DB_USERNAME="" ENV TARGET_DB_PASSWORD="" -ARG DEBIAN_FRONTEND=noninteractive ENV VERSION="v1.2.8" ENV BACKUP_CRON_EXPRESSION="" ENV TG_TOKEN="" @@ -43,29 +42,16 @@ ENV TG_CHAT_ID="" ARG WORKDIR="/config" ARG BACKUPDIR="/backup" ARG BACKUP_TMP_DIR="/tmp/backup" -ARG BACKUP_CRON="/etc/cron.d/backup_cron" -ARG BACKUP_CRON_SCRIPT="/usr/local/bin/backup_cron.sh" LABEL author="Jonas Kaninda" - -RUN apt-get update -qq - -RUN apt install postgresql-client cron gnupg -y - -# Clear cache -RUN apt-get clean && rm -rf /var/lib/apt/lists/* - +RUN apk --update add postgresql-client gnupg RUN mkdir $WORKDIR RUN mkdir $BACKUPDIR RUN mkdir -p $BACKUP_TMP_DIR RUN chmod 777 $WORKDIR RUN chmod 777 $BACKUPDIR RUN chmod 777 $BACKUP_TMP_DIR -RUN touch $BACKUP_CRON && \ - touch $BACKUP_CRON_SCRIPT && \ - chmod 777 $WORKDIR && \ - chmod 777 $BACKUP_CRON && \ - chmod 777 $BACKUP_CRON_SCRIPT +RUN chmod 777 $WORKDIR COPY --from=build /app/pg-bkup /usr/local/bin/pg-bkup RUN chmod +x /usr/local/bin/pg-bkup From 49a5a4f3f70a232114da24526f2eae563fd49311 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sun, 29 Sep 2024 20:38:07 +0200 Subject: [PATCH 11/13] docs: update baseos --- README.md | 2 +- docs/index.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 632944e..4de16b3 100644 --- a/README.md +++ b/README.md @@ -167,7 +167,7 @@ While it may work against different implementations, there are no guarantees abo We decided to publish this image as a simpler and more lightweight alternative because of the following requirements: -- The original image is based on `ubuntu` and requires additional tools, making it heavy. +- The original image is based on `Alpine` and requires additional tools, making it heavy. - This image is written in Go. - `arm64` and `arm/v7` architectures are supported. - Docker in Swarm mode is supported. diff --git a/docs/index.md b/docs/index.md index 9ddeb80..b5a5e46 100644 --- a/docs/index.md +++ b/docs/index.md @@ -158,7 +158,7 @@ While it may work against different implementations, there are no guarantees abo We decided to publish this image as a simpler and more lightweight alternative because of the following requirements: -- The original image is based on `ubuntu` and requires additional tools, making it heavy. +- The original image is based on `Alpine` and requires additional tools, making it heavy. - This image is written in Go. - `arm64` and `arm/v7` architectures are supported. - Docker in Swarm mode is supported. 
From 9c3bfe83fba673ccdb68697f877435cf9f94fbf0 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Sun, 29 Sep 2024 23:53:19 +0200 Subject: [PATCH 12/13] feat: add ftp backup storage --- docker/Dockerfile | 8 +++- docker/supervisord.conf | 13 ------ docs/how-tos/backup-to-ssh.md | 10 ++--- docs/index.md | 2 +- go.mod | 3 ++ go.sum | 7 +++ pkg/backup.go | 42 ++++++++++++++++-- pkg/config.go | 30 +++++++++++-- pkg/ftp.go | 81 +++++++++++++++++++++++++++++++++++ pkg/restore.go | 22 +++++++--- {utils => pkg}/s3.go | 35 +++++++-------- pkg/var.go | 8 +++- 12 files changed, 209 insertions(+), 52 deletions(-) delete mode 100644 docker/supervisord.conf create mode 100644 pkg/ftp.go rename {utils => pkg}/s3.go (78%) diff --git a/docker/Dockerfile b/docker/Dockerfile index c67a6a2..b02f1af 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -25,11 +25,15 @@ ENV AWS_REGION="us-west-2" ENV AWS_DISABLE_SSL="false" ENV GPG_PASSPHRASE="" ENV SSH_USER="" -ENV SSH_REMOTE_PATH="" ENV SSH_PASSWORD="" ENV SSH_HOST_NAME="" ENV SSH_IDENTIFY_FILE="" -ENV SSH_PORT="22" +ENV SSH_PORT=22 +ENV REMOTE_PATH="" +ENV FTP_HOST_NAME="" +ENV FTP_PORT=21 +ENV FTP_USER="" +ENV FTP_PASSWORD="" ENV TARGET_DB_HOST="" ENV TARGET_DB_PORT=5432 ENV TARGET_DB_NAME="" diff --git a/docker/supervisord.conf b/docker/supervisord.conf deleted file mode 100644 index 84b35a1..0000000 --- a/docker/supervisord.conf +++ /dev/null @@ -1,13 +0,0 @@ -[supervisord] -nodaemon=true -user=root -logfile=/var/log/supervisor/supervisord.log -pidfile=/var/run/supervisord.pid - -[program:cron] -command = /bin/bash -c "declare -p | grep -Ev '^declare -[[:alpha:]]*r' > /run/supervisord.env && /usr/sbin/cron -f -L 15" -autostart=true -autorestart=true -user = root -stderr_logfile=/var/log/cron.err.log -stdout_logfile=/var/log/cron.out.log \ No newline at end of file diff --git a/docs/how-tos/backup-to-ssh.md b/docs/how-tos/backup-to-ssh.md index 1d5a9e6..85b4db3 100644 --- a/docs/how-tos/backup-to-ssh.md +++ b/docs/how-tos/backup-to-ssh.md @@ -8,10 +8,10 @@ nav_order: 3 As described for s3 backup section, to change the storage of your backup and use SSH Remote server as storage. You need to add `--storage ssh` or `--storage remote`. -You need to add the full remote path by adding `--path /home/jkaninda/backups` flag or using `SSH_REMOTE_PATH` environment variable. +You need to add the full remote path by adding `--path /home/jkaninda/backups` flag or using `REMOTE_PATH` environment variable. {: .note } -These environment variables are required for SSH backup `SSH_HOST_NAME`, `SSH_USER`, `SSH_REMOTE_PATH`, `SSH_IDENTIFY_FILE`, `SSH_PORT` or `SSH_PASSWORD` if you dont use a private key to access to your server. +These environment variables are required for SSH backup `SSH_HOST_NAME`, `SSH_USER`, `REMOTE_PATH`, `SSH_IDENTIFY_FILE`, `SSH_PORT` or `SSH_PASSWORD` if you dont use a private key to access to your server. Accessing the remote server using password is not recommended, use private key instead. 
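
For context, key-based authentication with `SSH_IDENTIFY_FILE` maps onto `golang.org/x/crypto/ssh` (already a dependency in `go.mod`) roughly as in the sketch below; this is an illustration, not the image's exact code. The compose example that follows wires up the same settings as environment variables.

```go
package main

import (
	"log"
	"os"

	"golang.org/x/crypto/ssh"
)

func main() {
	// Prefer the private key (SSH_IDENTIFY_FILE) over SSH_PASSWORD.
	key, err := os.ReadFile(os.Getenv("SSH_IDENTIFY_FILE"))
	if err != nil {
		log.Fatalf("read identity file: %v", err)
	}
	signer, err := ssh.ParsePrivateKey(key)
	if err != nil {
		log.Fatalf("parse private key: %v", err)
	}
	cfg := &ssh.ClientConfig{
		User:            os.Getenv("SSH_USER"),
		Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
		HostKeyCallback: ssh.InsecureIgnoreHostKey(), // pin host keys outside of sketches
	}
	client, err := ssh.Dial("tcp", os.Getenv("SSH_HOST_NAME")+":"+os.Getenv("SSH_PORT"), cfg)
	if err != nil {
		log.Fatalf("ssh dial: %v", err)
	}
	defer client.Close()
	log.Println("connected")
}
```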
```yml @@ -36,7 +36,7 @@ services: - SSH_HOST_NAME="hostname" - SSH_PORT=22 - SSH_USER=user - - SSH_REMOTE_PATH=/home/jkaninda/backups + - REMOTE_PATH=/home/jkaninda/backups - SSH_IDENTIFY_FILE=/tmp/id_ed25519 ## We advise you to use a private jey instead of password #- SSH_PASSWORD=password @@ -76,7 +76,7 @@ services: - SSH_HOST_NAME="hostname" - SSH_PORT=22 - SSH_USER=user - - SSH_REMOTE_PATH=/home/jkaninda/backups + - REMOTE_PATH=/home/jkaninda/backups - SSH_IDENTIFY_FILE=/tmp/id_ed25519 ## We advise you to use a private jey instead of password #- SSH_PASSWORD=password @@ -130,7 +130,7 @@ spec: value: "22" - name: SSH_USER value: "xxx" - - name: SSH_REMOTE_PATH + - name: REMOTE_PATH value: "/home/jkaninda/backups" - name: AWS_ACCESS_KEY value: "xxxx" diff --git a/docs/index.md b/docs/index.md index b5a5e46..33d8608 100644 --- a/docs/index.md +++ b/docs/index.md @@ -6,7 +6,7 @@ nav_order: 1 # About pg-bkup {:.no_toc} -PostreSQL Backup is a Docker container image that can be used to backup, restore and migrate Postgres database. It supports local storage, AWS S3 or any S3 Alternatives for Object Storage, and SSH compatible storage. +PostreSQL Backup is a Docker container image that can be used to backup, restore and migrate Postgres database. It supports local storage, AWS S3 or any S3 Alternatives for Object Storage, ftp and SSH compatible storage. It also supports database __encryption__ using GPG. diff --git a/go.mod b/go.mod index b9683c2..d3d5c5b 100644 --- a/go.mod +++ b/go.mod @@ -10,7 +10,10 @@ require ( require ( github.com/aws/aws-sdk-go v1.55.3 // indirect github.com/bramvdbogaerde/go-scp v1.5.0 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hpcloud/tail v1.0.0 // indirect + github.com/jlaffaye/ftp v0.2.0 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/robfig/cron/v3 v3.0.1 // indirect golang.org/x/crypto v0.18.0 // indirect diff --git a/go.sum b/go.sum index e079d64..359196b 100644 --- a/go.sum +++ b/go.sum @@ -12,10 +12,17 @@ github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jlaffaye/ftp v0.2.0 h1:lXNvW7cBu7R/68bknOX3MrRIIqZ61zELs1P2RAiA3lg= +github.com/jlaffaye/ftp v0.2.0/go.mod h1:is2Ds5qkhceAPy2xD6RLI6hmp/qysSoymZ+Z2uTnspI= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod 
 github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
diff --git a/pkg/backup.go b/pkg/backup.go
index 6243e5a..21ad2ac 100644
--- a/pkg/backup.go
+++ b/pkg/backup.go
@@ -81,7 +81,8 @@ func BackupTask(db *dbConfig, config *BackupConfig) {
 	case "ssh", "remote":
 		sshBackup(db, config)
 	case "ftp":
-		utils.Fatal("Not supported storage type: %s", config.storage)
+		ftpBackup(db, config)
+		//utils.Fatal("Not supported storage type: %s", config.storage)
 	default:
 		localBackup(db, config)
 	}
@@ -196,7 +197,7 @@ func s3Backup(db *dbConfig, config *BackupConfig) {
 
 	utils.Info("Uploading backup archive to remote storage S3 ... ")
 	utils.Info("Backup name is %s", finalFileName)
-	err := utils.UploadFileToS3(tmpPath, finalFileName, bucket, s3Path)
+	err := UploadFileToS3(tmpPath, finalFileName, bucket, s3Path)
 	if err != nil {
 		utils.Fatal("Error uploading backup archive to S3: %s ", err)
 
@@ -210,7 +211,7 @@ func s3Backup(db *dbConfig, config *BackupConfig) {
 	}
 	// Delete old backup
 	if config.prune {
-		err := utils.DeleteOldBackup(bucket, s3Path, config.backupRetention)
+		err := DeleteOldBackup(bucket, s3Path, config.backupRetention)
 		if err != nil {
 			utils.Fatal("Error deleting old backup from S3: %s ", err)
 		}
@@ -256,6 +257,41 @@ func sshBackup(db *dbConfig, config *BackupConfig) {
 	//Delete temp
 	deleteTemp()
 }
+func ftpBackup(db *dbConfig, config *BackupConfig) {
+	utils.Info("Backing up database to the remote FTP server")
+	// Back up the database
+	BackupDatabase(db, config.backupFileName, disableCompression)
+	finalFileName := config.backupFileName
+	if config.encryption {
+		encryptBackup(config.backupFileName, config.passphrase)
+		finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
+	}
+	utils.Info("Uploading backup archive to the remote FTP server ... ")
+	utils.Info("Backup name is %s", finalFileName)
+	err := CopyToFTP(finalFileName, config.remotePath)
+	if err != nil {
+		utils.Fatal("Error uploading file to the remote FTP server: %s ", err)
+
+	}
+
+	// Delete backup file from tmp folder
+	err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName))
+	if err != nil {
+		utils.Error("Error deleting file: %v", err)
+
+	}
+	if config.prune {
+		//TODO: Delete old backup from remote server
+		utils.Info("Deleting old backups from the remote FTP server is not implemented yet")
+
+	}
+
+	utils.Done("Uploading backup archive to the remote FTP server ... done ")
+	//Send notification
+	utils.NotifySuccess(finalFileName)
+	//Delete temp
+	deleteTemp()
+}
 
 func encryptBackup(backupFileName, gpqPassphrase string) {
 	err := Encrypt(filepath.Join(tmpPath, backupFileName), gpqPassphrase)
diff --git a/pkg/config.go b/pkg/config.go
index 942008e..c2136a9 100644
--- a/pkg/config.go
+++ b/pkg/config.go
@@ -44,6 +44,29 @@ type BackupConfig struct {
 	storage        string
 	cronExpression string
 }
+type FTPConfig struct {
+	host       string
+	user       string
+	password   string
+	port       string
+	remotePath string
+}
+
+func initFtpConfig() *FTPConfig {
+	// Initialize FTP configs
+	fConfig := FTPConfig{}
+	fConfig.host = os.Getenv("FTP_HOST_NAME")
+	fConfig.user = os.Getenv("FTP_USER")
+	fConfig.password = os.Getenv("FTP_PASSWORD")
+	fConfig.port = os.Getenv("FTP_PORT")
+	fConfig.remotePath = os.Getenv("REMOTE_PATH")
+	err := utils.CheckEnvVars(ftpVars)
+	if err != nil {
+		utils.Error("Please make sure all required environment variables for FTP are set")
+		utils.Fatal("Error checking environment variables: %s", err)
+	}
+	return &fConfig
+}
 
 func initDbConfig(cmd *cobra.Command) *dbConfig {
 	//Set env
@@ -66,9 +89,9 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig {
 	utils.SetEnv("STORAGE_PATH", storagePath)
 	utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION")
 	utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")
-
+	utils.GetEnv(cmd, "path", "REMOTE_PATH")
 	//Get flag value and set env
-	remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
+	remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH")
 	storage = utils.GetEnv(cmd, "storage", "STORAGE")
 	backupRetention, _ := cmd.Flags().GetInt("keep-last")
 	prune, _ := cmd.Flags().GetBool("prune")
@@ -106,10 +129,11 @@ type RestoreConfig struct {
 
 func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
 	utils.SetEnv("STORAGE_PATH", storagePath)
+	utils.GetEnv(cmd, "path", "REMOTE_PATH")
 
 	//Get flag value and set env
 	s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
-	remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH")
+	remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH")
 	storage = utils.GetEnv(cmd, "storage", "STORAGE")
 	file = utils.GetEnv(cmd, "file", "FILE_NAME")
 	_, _ = cmd.Flags().GetString("mode")
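
A note on the config change above: the `--path` flag now writes to `REMOTE_PATH`, and `utils.GetEnvVariable` falls back to the deprecated `SSH_REMOTE_PATH` name. The helper itself is not shown in this patch; what follows is a minimal sketch of the fallback semantics assumed here, not the project's actual implementation:

```go
// Sketch only — illustrates the assumed lookup order of utils.GetEnvVariable:
// the new variable first, then the deprecated one as a fallback.
package main

import (
	"fmt"
	"os"
)

func getEnvVariable(name, fallback string) string {
	if v := os.Getenv(name); v != "" {
		return v
	}
	// Deprecated name, kept so existing deployments keep working.
	return os.Getenv(fallback)
}

func main() {
	os.Setenv("SSH_REMOTE_PATH", "/home/jkaninda/backups") // legacy config only
	fmt.Println(getEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH"))
	// Output: /home/jkaninda/backups
}
```

Keeping the old variable as a read-only fallback lets existing deployments upgrade without editing their compose files.
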
diff --git a/pkg/ftp.go b/pkg/ftp.go
new file mode 100644
index 0000000..9ce9319
--- /dev/null
+++ b/pkg/ftp.go
@@ -0,0 +1,81 @@
+package pkg
+
+import (
+	"fmt"
+	"github.com/jlaffaye/ftp"
+	"io"
+	"os"
+	"path/filepath"
+	"time"
+)
+
+// initFtpClient initializes and authenticates an FTP client
+func initFtpClient() (*ftp.ServerConn, error) {
+	ftpConfig := initFtpConfig()
+	ftpClient, err := ftp.Dial(fmt.Sprintf("%s:%s", ftpConfig.host, ftpConfig.port), ftp.DialWithTimeout(5*time.Second))
+	if err != nil {
+		return nil, fmt.Errorf("failed to connect to FTP: %w", err)
+	}
+
+	err = ftpClient.Login(ftpConfig.user, ftpConfig.password)
+	if err != nil {
+		return nil, fmt.Errorf("failed to log in to FTP: %w", err)
+	}
+
+	return ftpClient, nil
+}
+
+// CopyToFTP uploads a file to the remote FTP server
+func CopyToFTP(fileName, remotePath string) (err error) {
+	// Use the remotePath argument; initFtpClient already reads the FTP config
+	ftpClient, err := initFtpClient()
+	if err != nil {
+		return err
+	}
+	defer ftpClient.Quit()
+
+	filePath := filepath.Join(tmpPath, fileName)
+	file, err := os.Open(filePath)
+	if err != nil {
+		return fmt.Errorf("failed to open file %s: %w", fileName, err)
+	}
+	defer file.Close()
+
+	remoteFilePath := filepath.Join(remotePath, fileName)
+	err = ftpClient.Stor(remoteFilePath, file)
+	if err != nil {
+		return fmt.Errorf("failed to upload file %s: %w", fileName, err)
+	}
+
+	return nil
+}
+
+// CopyFromFTP downloads a file from the remote FTP server
+func CopyFromFTP(fileName, remotePath string) (err error) {
+	ftpClient, err := initFtpClient()
+	if err != nil {
+		return err
+	}
+	defer ftpClient.Quit()
+
+	remoteFilePath := filepath.Join(remotePath, fileName)
+	r, err := ftpClient.Retr(remoteFilePath)
+	if err != nil {
+		return fmt.Errorf("failed to retrieve file %s: %w", fileName, err)
+	}
+	defer r.Close()
+
+	localFilePath := filepath.Join(tmpPath, fileName)
+	outFile, err := os.Create(localFilePath)
+	if err != nil {
+		return fmt.Errorf("failed to create local file %s: %w", fileName, err)
+	}
+	defer outFile.Close()
+
+	_, err = io.Copy(outFile, r)
+	if err != nil {
+		return fmt.Errorf("failed to copy data to local file %s: %w", fileName, err)
+	}
+
+	return nil
+}
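
pkg/ftp.go above is the complete new transport. For orientation, here is a hedged round-trip sketch of how the two exported helpers fit together; the server address, credentials, path, and file name are placeholders, and the environment variables are the ones `initFtpConfig` reads:

```go
// Sketch only — not part of the patch. Assumes a reachable FTP server
// and a backup archive already present in tmpPath.
package pkg

import "os"

func ftpRoundTripExample() error {
	// Placeholder values; initFtpConfig reads exactly these variables.
	os.Setenv("FTP_HOST_NAME", "ftp.example.com")
	os.Setenv("FTP_PORT", "21")
	os.Setenv("FTP_USER", "user")
	os.Setenv("FTP_PASSWORD", "password")
	os.Setenv("REMOTE_PATH", "/backups")

	// Upload a backup archive from tmpPath, then fetch it back.
	if err := CopyToFTP("database_20240930_000000.sql.gz", "/backups"); err != nil {
		return err
	}
	return CopyFromFTP("database_20240930_000000.sql.gz", "/backups")
}
```
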
diff --git a/pkg/restore.go b/pkg/restore.go
index ca13d6a..970b527 100644
--- a/pkg/restore.go
+++ b/pkg/restore.go
@@ -30,7 +30,7 @@ func StartRestore(cmd *cobra.Command) {
 	case "ssh":
 		restoreFromRemote(dbConf, restoreConf.file, restoreConf.remotePath)
 	case "ftp":
-		utils.Fatal("Restore from FTP is not yet supported")
+		restoreFromFTP(dbConf, restoreConf.file, restoreConf.remotePath)
 	default:
 		utils.Info("Restore database from local")
 		copyToTmp(storagePath, restoreConf.file)
@@ -40,7 +40,7 @@ func StartRestore(cmd *cobra.Command) {
 
 func restoreFromS3(db *dbConfig, file, bucket, s3Path string) {
 	utils.Info("Restore database from s3")
-	err := utils.DownloadFile(tmpPath, file, bucket, s3Path)
+	err := DownloadFile(tmpPath, file, bucket, s3Path)
 	if err != nil {
 		utils.Fatal("Error download file from s3 %s %v ", file, err)
 	}
@@ -54,6 +54,14 @@ func restoreFromRemote(db *dbConfig, file, remotePath string) {
 	}
 	RestoreDatabase(db, file)
 }
+func restoreFromFTP(db *dbConfig, file, remotePath string) {
+	utils.Info("Restore database from FTP server")
+	err := CopyFromFTP(file, remotePath)
+	if err != nil {
+		utils.Fatal("Error downloading file from FTP server: %s %v", filepath.Join(remotePath, file), err)
+	}
+	RestoreDatabase(db, file)
+}
 
 // RestoreDatabase restore database
 func RestoreDatabase(db *dbConfig, file string) {
@@ -93,11 +101,11 @@ func RestoreDatabase(db *dbConfig, file string) {
 	testDatabaseConnection(db)
 	utils.Info("Restoring database...")
 
-	extension := filepath.Ext(fmt.Sprintf("%s/%s", tmpPath, file))
+	extension := filepath.Ext(file)
 	// Restore from compressed file / .sql.gz
 	if extension == ".gz" {
-		str := "zcat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName
-		_, err := exec.Command("bash", "-c", str).Output()
+		str := "zcat " + filepath.Join(tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName
+		_, err := exec.Command("sh", "-c", str).Output()
 		if err != nil {
 			utils.Fatal("Error, in restoring the database %v", err)
 		}
@@ -108,8 +116,8 @@ func RestoreDatabase(db *dbConfig, file string) {
 
 	} else if extension == ".sql" {
 		//Restore from sql file
-		str := "cat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName
-		_, err := exec.Command("bash", "-c", str).Output()
+		str := "cat " + filepath.Join(tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName
+		_, err := exec.Command("sh", "-c", str).Output()
 		if err != nil {
 			utils.Fatal("Error in restoring the database %v", err)
 		}
diff --git a/utils/s3.go b/pkg/s3.go
similarity index 78%
rename from utils/s3.go
rename to pkg/s3.go
index 57d5f8a..9a80c16 100644
--- a/utils/s3.go
+++ b/pkg/s3.go
@@ -1,10 +1,10 @@
-// Package utils /
+// Package pkg /
 /*****
 @author    Jonas Kaninda
 @license   MIT License
 @Copyright © 2024 Jonas Kaninda
 **/
-package utils
+package pkg
 
 import (
 	"bytes"
@@ -13,6 +13,7 @@ import (
 	"github.com/aws/aws-sdk-go/aws/session"
 	"github.com/aws/aws-sdk-go/service/s3"
 	"github.com/aws/aws-sdk-go/service/s3/s3manager"
+	"github.com/jkaninda/pg-bkup/utils"
 	"net/http"
 	"os"
 	"path/filepath"
@@ -32,20 +33,20 @@ func CreateSession() (*session.Session, error) {
 		"AWS_REGION",
 	}
 
-	endPoint := GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT")
-	accessKey := GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY")
-	secretKey := GetEnvVariable("AWS_SECRET_KEY", "SECRET_KEY")
-	_ = GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
+	endPoint := utils.GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT")
+	accessKey := utils.GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY")
+	secretKey := utils.GetEnvVariable("AWS_SECRET_KEY", "SECRET_KEY")
+	_ = utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
 	region := os.Getenv("AWS_REGION")
 	awsDisableSsl, err := strconv.ParseBool(os.Getenv("AWS_DISABLE_SSL"))
 	if err != nil {
-		Fatal("Unable to parse AWS_DISABLE_SSL env var: %s", err)
+		utils.Fatal("Unable to parse AWS_DISABLE_SSL env var: %s", err)
 	}
-	err = CheckEnvVars(awsVars)
+	err = utils.CheckEnvVars(awsVars)
 	if err != nil {
-		Fatal("Error checking environment variables\n: %s", err)
+		utils.Fatal("Error checking environment variables\n: %s", err)
 	}
 	// Configure to use MinIO Server
 	s3Config := &aws.Config{
@@ -105,10 +106,10 @@ func DownloadFile(destinationPath, key, bucket, prefix string) error {
 	if err != nil {
 		return err
 	}
-	Info("Download backup from S3 storage...")
+	utils.Info("Downloading backup from S3 storage...")
 	file, err := os.Create(filepath.Join(destinationPath, key))
 	if err != nil {
-		Error("Failed to create file", err)
+		utils.Error("Failed to create file", err)
 		return err
 	}
 	defer file.Close()
@@ -122,10 +123,10 @@ func DownloadFile(destinationPath, key, bucket, prefix string) error {
 		Key:    aws.String(objectKey),
 	})
 	if err != nil {
-		Error("Failed to download file", err)
+		utils.Error("Failed to download file %s", key)
 		return err
 	}
-	Info("Backup downloaded: %s bytes size %s ", file.Name(), numBytes)
+	utils.Info("Backup downloaded: %s, size: %d bytes", file.Name(), numBytes)
 	return nil
 }
@@ -155,18 +156,18 @@ func DeleteOldBackup(bucket, prefix string, retention int) error {
 				Key:    object.Key,
 			})
 			if err != nil {
-				Info("Failed to delete object %s: %v", *object.Key, err)
+				utils.Info("Failed to delete object %s: %v", *object.Key, err)
 			} else {
-				Info("Deleted object %s\n", *object.Key)
+				utils.Info("Deleted object %s\n", *object.Key)
 			}
 		}
 		return !lastPage
 	})
 	if err != nil {
-		Error("Failed to list objects: %v", err)
+		utils.Error("Failed to list objects: %v", err)
 	}
-	Info("Finished deleting old files.")
+	utils.Info("Finished deleting old files.")
 	return nil
 }
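
With s3.go moved from `utils` into `pkg`, the S3 helpers now sit next to their callers and `utils` no longer depends on storage backends; call sites inside `pkg` simply drop the qualifier. A minimal sketch of such a call site (bucket and path values are placeholders):

```go
// Sketch only — how the relocated S3 helpers are invoked from inside pkg.
package pkg

import (
	"os"

	"github.com/jkaninda/pg-bkup/utils"
)

func s3UploadExample() {
	bucket := os.Getenv("AWS_S3_BUCKET_NAME") // also checked by CreateSession
	if err := UploadFileToS3(tmpPath, "database_20240930_000000.sql.gz", bucket, "custom_path"); err != nil {
		utils.Fatal("Error uploading backup archive to S3: %s ", err)
	}
}
```
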
diff --git a/pkg/var.go b/pkg/var.go
index 52dcf2e..2ec9f88 100644
--- a/pkg/var.go
+++ b/pkg/var.go
@@ -41,9 +41,15 @@ var targetDbConf *targetDbConfig
 // sshVars Required environment variables for SSH remote server storage
 var sshVars = []string{
 	"SSH_USER",
-	"SSH_REMOTE_PATH",
 	"SSH_HOST_NAME",
 	"SSH_PORT",
+	"REMOTE_PATH",
+}
+var ftpVars = []string{
+	"FTP_HOST_NAME",
+	"FTP_USER",
+	"FTP_PASSWORD",
+	"FTP_PORT",
 }
 
 // AwsVars Required environment variables for AWS S3 storage

From be9707e91d5d8ab3d4ce7c01cfb08abba437d620 Mon Sep 17 00:00:00 2001
From: Jonas Kaninda
Date: Mon, 30 Sep 2024 00:00:21 +0200
Subject: [PATCH 13/13] docs: add ftp backup example

---
 README.md                            |  2 +-
 docs/how-tos/backup-to-ftp.md        | 44 ++++++++++++++++++++++++
 docs/how-tos/deploy-on-kubernetes.md |  2 +-
 docs/how-tos/encrypt-backup.md       |  2 +-
 docs/how-tos/migrate.md              |  2 +-
 docs/how-tos/restore-from-s3.md      |  2 +-
 docs/how-tos/restore-from-ssh.md     |  2 +-
 docs/how-tos/restore.md              |  2 +-
 8 files changed, 51 insertions(+), 7 deletions(-)
 create mode 100644 docs/how-tos/backup-to-ftp.md

diff --git a/README.md b/README.md
index 4de16b3..31f98d1 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
 # PostgreSQL Backup
-PostgreSQL Backup is a Docker container image that can be used to backup, restore and migrate Postgres database. It supports local storage, AWS S3 or any S3 Alternatives for Object Storage, and SSH compatible storage.
+PostgreSQL Backup is a Docker container image that can be used to back up, restore and migrate a Postgres database. It supports local storage, AWS S3 or any S3-compatible object storage, FTP, and SSH-compatible storage.
 It also supports __encrypting__ your backups using GPG.
 
 The [jkaninda/pg-bkup](https://hub.docker.com/r/jkaninda/pg-bkup) Docker image can be deployed on Docker, Docker Swarm and Kubernetes.
diff --git a/docs/how-tos/backup-to-ftp.md b/docs/how-tos/backup-to-ftp.md
new file mode 100644
index 0000000..5c27d46
--- /dev/null
+++ b/docs/how-tos/backup-to-ftp.md
@@ -0,0 +1,44 @@
+---
+title: Backup to FTP remote server
+layout: default
+parent: How Tos
+nav_order: 4
+---
+# Backup to FTP remote server
+
+
+As described in the S3 backup section, to change the backup storage to a remote FTP server, you need to add `--storage ftp`.
+You need to provide the full remote path with the `--path /home/jkaninda/backups` flag or the `REMOTE_PATH` environment variable.
+
+{: .note }
+These environment variables are required for FTP backup: `FTP_HOST_NAME`, `FTP_USER`, `FTP_PASSWORD`, `FTP_PORT`, and `REMOTE_PATH`.
+
+```yml
+services:
+  pg-bkup:
+    # In production, it is advised to lock your image tag to a proper
+    # release version instead of using `latest`.
+    # Check https://github.com/jkaninda/pg-bkup/releases
+    # for a list of available releases.
+    image: jkaninda/pg-bkup
+    container_name: pg-bkup
+    command: backup --storage ftp -d database
+    environment:
+      - DB_PORT=5432
+      - DB_HOST=postgres
+      - DB_NAME=database
+      - DB_USERNAME=username
+      - DB_PASSWORD=password
+      ## FTP config
+      - FTP_HOST_NAME="hostname"
+      - FTP_PORT=21
+      - FTP_USER=user
+      - FTP_PASSWORD=password
+      - REMOTE_PATH=/home/jkaninda/backups
+
+    # pg-bkup container must be connected to the same network as your database
+    networks:
+      - web
+networks:
+  web:
+```
\ No newline at end of file
diff --git a/docs/how-tos/deploy-on-kubernetes.md b/docs/how-tos/deploy-on-kubernetes.md
index 2e6a1e4..5566d4e 100644
--- a/docs/how-tos/deploy-on-kubernetes.md
+++ b/docs/how-tos/deploy-on-kubernetes.md
@@ -2,7 +2,7 @@
 title: Deploy on Kubernetes
 layout: default
 parent: How Tos
-nav_order: 8
+nav_order: 9
 ---
 
 ## Deploy on Kubernetes
diff --git a/docs/how-tos/encrypt-backup.md b/docs/how-tos/encrypt-backup.md
index fbe976e..b228fe2 100644
--- a/docs/how-tos/encrypt-backup.md
+++ b/docs/how-tos/encrypt-backup.md
@@ -2,7 +2,7 @@
 title: Encrypt backups using GPG
 layout: default
 parent: How Tos
-nav_order: 7
+nav_order: 8
 ---
 
 # Encrypt backup
diff --git a/docs/how-tos/migrate.md b/docs/how-tos/migrate.md
index e64cccb..b59925c 100644
--- a/docs/how-tos/migrate.md
+++ b/docs/how-tos/migrate.md
@@ -2,7 +2,7 @@
 title: Migrate database
 layout: default
 parent: How Tos
-nav_order: 9
+nav_order: 10
 ---
 
 # Migrate database
diff --git a/docs/how-tos/restore-from-s3.md b/docs/how-tos/restore-from-s3.md
index 485c650..0f4a2e2 100644
--- a/docs/how-tos/restore-from-s3.md
+++ b/docs/how-tos/restore-from-s3.md
@@ -2,7 +2,7 @@
 title: Restore database from AWS S3
 layout: default
 parent: How Tos
-nav_order: 5
+nav_order: 6
 ---
 
 # Restore database from S3 storage
diff --git a/docs/how-tos/restore-from-ssh.md b/docs/how-tos/restore-from-ssh.md
index 646ec21..6aaffeb 100644
--- a/docs/how-tos/restore-from-ssh.md
+++ b/docs/how-tos/restore-from-ssh.md
@@ -2,7 +2,7 @@
 title: Restore database from SSH
 layout: default
 parent: How Tos
-nav_order: 6
+nav_order: 7
 ---
 
 # Restore database from SSH remote server
diff --git a/docs/how-tos/restore.md b/docs/how-tos/restore.md
index d8f8f56..91f91fb 100644
--- a/docs/how-tos/restore.md
+++ b/docs/how-tos/restore.md
@@ -2,7 +2,7 @@
 title: Restore database
 layout: default
 parent: How Tos
-nav_order: 4
+nav_order: 5
 ---
 
 # Restore database
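
One gap the series leaves open is automated coverage for the new FTP environment contract. A minimal smoke test, sketched here as a suggestion rather than as part of the patches (it assumes Go 1.17+ for `t.Setenv`, and all values are placeholders):

```go
// Sketch only — checks the FTP environment contract that initFtpConfig
// enforces through utils.CheckEnvVars.
package pkg

import (
	"testing"

	"github.com/jkaninda/pg-bkup/utils"
)

func TestFtpVarsContract(t *testing.T) {
	t.Setenv("FTP_HOST_NAME", "ftp.example.com")
	t.Setenv("FTP_USER", "user")
	t.Setenv("FTP_PASSWORD", "password")
	t.Setenv("FTP_PORT", "21")

	if err := utils.CheckEnvVars(ftpVars); err != nil {
		t.Fatalf("expected all required FTP variables to be accepted: %v", err)
	}
}
```
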