diff --git a/docker/Dockerfile b/docker/Dockerfile index c67a6a2..b02f1af 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -25,11 +25,15 @@ ENV AWS_REGION="us-west-2" ENV AWS_DISABLE_SSL="false" ENV GPG_PASSPHRASE="" ENV SSH_USER="" -ENV SSH_REMOTE_PATH="" ENV SSH_PASSWORD="" ENV SSH_HOST_NAME="" ENV SSH_IDENTIFY_FILE="" -ENV SSH_PORT="22" +ENV SSH_PORT=22 +ENV REMOTE_PATH="" +ENV FTP_HOST_NAME="" +ENV FTP_PORT=21 +ENV FTP_USER="" +ENV FTP_PASSWORD="" ENV TARGET_DB_HOST="" ENV TARGET_DB_PORT=5432 ENV TARGET_DB_NAME="" diff --git a/docker/supervisord.conf b/docker/supervisord.conf deleted file mode 100644 index 84b35a1..0000000 --- a/docker/supervisord.conf +++ /dev/null @@ -1,13 +0,0 @@ -[supervisord] -nodaemon=true -user=root -logfile=/var/log/supervisor/supervisord.log -pidfile=/var/run/supervisord.pid - -[program:cron] -command = /bin/bash -c "declare -p | grep -Ev '^declare -[[:alpha:]]*r' > /run/supervisord.env && /usr/sbin/cron -f -L 15" -autostart=true -autorestart=true -user = root -stderr_logfile=/var/log/cron.err.log -stdout_logfile=/var/log/cron.out.log \ No newline at end of file diff --git a/docs/how-tos/backup-to-ssh.md b/docs/how-tos/backup-to-ssh.md index 1d5a9e6..85b4db3 100644 --- a/docs/how-tos/backup-to-ssh.md +++ b/docs/how-tos/backup-to-ssh.md @@ -8,10 +8,10 @@ nav_order: 3 As described for s3 backup section, to change the storage of your backup and use SSH Remote server as storage. You need to add `--storage ssh` or `--storage remote`. -You need to add the full remote path by adding `--path /home/jkaninda/backups` flag or using `SSH_REMOTE_PATH` environment variable. +You need to add the full remote path by adding `--path /home/jkaninda/backups` flag or using `REMOTE_PATH` environment variable. {: .note } -These environment variables are required for SSH backup `SSH_HOST_NAME`, `SSH_USER`, `SSH_REMOTE_PATH`, `SSH_IDENTIFY_FILE`, `SSH_PORT` or `SSH_PASSWORD` if you dont use a private key to access to your server. 
+These environment variables are required for SSH backup `SSH_HOST_NAME`, `SSH_USER`, `REMOTE_PATH`, `SSH_IDENTIFY_FILE`, `SSH_PORT` or `SSH_PASSWORD` if you don't use a private key to access your server. Accessing the remote server using password is not recommended, use private key instead. ```yml @@ -36,7 +36,7 @@ services: - SSH_HOST_NAME="hostname" - SSH_PORT=22 - SSH_USER=user - - SSH_REMOTE_PATH=/home/jkaninda/backups + - REMOTE_PATH=/home/jkaninda/backups - SSH_IDENTIFY_FILE=/tmp/id_ed25519 ## We advise you to use a private jey instead of password #- SSH_PASSWORD=password @@ -76,7 +76,7 @@ services: - SSH_HOST_NAME="hostname" - SSH_PORT=22 - SSH_USER=user - - SSH_REMOTE_PATH=/home/jkaninda/backups + - REMOTE_PATH=/home/jkaninda/backups - SSH_IDENTIFY_FILE=/tmp/id_ed25519 ## We advise you to use a private jey instead of password #- SSH_PASSWORD=password @@ -130,7 +130,7 @@ spec: value: "22" - name: SSH_USER value: "xxx" - - name: SSH_REMOTE_PATH + - name: REMOTE_PATH value: "/home/jkaninda/backups" - name: AWS_ACCESS_KEY value: "xxxx" diff --git a/docs/index.md b/docs/index.md index b5a5e46..33d8608 100644 --- a/docs/index.md +++ b/docs/index.md @@ -6,7 +6,7 @@ nav_order: 1 # About pg-bkup {:.no_toc} -PostreSQL Backup is a Docker container image that can be used to backup, restore and migrate Postgres database. It supports local storage, AWS S3 or any S3 Alternatives for Object Storage, and SSH compatible storage. +PostgreSQL Backup is a Docker container image that can be used to backup, restore and migrate Postgres database. It supports local storage, AWS S3 or any S3 Alternatives for Object Storage, FTP and SSH compatible storage. It also supports database __encryption__ using GPG. 
diff --git a/go.mod b/go.mod index b9683c2..d3d5c5b 100644 --- a/go.mod +++ b/go.mod @@ -10,7 +10,10 @@ require ( require ( github.com/aws/aws-sdk-go v1.55.3 // indirect github.com/bramvdbogaerde/go-scp v1.5.0 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hpcloud/tail v1.0.0 // indirect + github.com/jlaffaye/ftp v0.2.0 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/robfig/cron/v3 v3.0.1 // indirect golang.org/x/crypto v0.18.0 // indirect diff --git a/go.sum b/go.sum index e079d64..359196b 100644 --- a/go.sum +++ b/go.sum @@ -12,10 +12,17 @@ github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jlaffaye/ftp v0.2.0 h1:lXNvW7cBu7R/68bknOX3MrRIIqZ61zELs1P2RAiA3lg= +github.com/jlaffaye/ftp v0.2.0/go.mod h1:is2Ds5qkhceAPy2xD6RLI6hmp/qysSoymZ+Z2uTnspI= 
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= diff --git a/pkg/backup.go b/pkg/backup.go index 6243e5a..21ad2ac 100644 --- a/pkg/backup.go +++ b/pkg/backup.go @@ -81,7 +81,8 @@ func BackupTask(db *dbConfig, config *BackupConfig) { case "ssh", "remote": sshBackup(db, config) case "ftp": - utils.Fatal("Not supported storage type: %s", config.storage) + ftpBackup(db, config) + //utils.Fatal("Not supported storage type: %s", config.storage) default: localBackup(db, config) } @@ -196,7 +197,7 @@ func s3Backup(db *dbConfig, config *BackupConfig) { utils.Info("Uploading backup archive to remote storage S3 ... ") utils.Info("Backup name is %s", finalFileName) - err := utils.UploadFileToS3(tmpPath, finalFileName, bucket, s3Path) + err := UploadFileToS3(tmpPath, finalFileName, bucket, s3Path) if err != nil { utils.Fatal("Error uploading backup archive to S3: %s ", err) @@ -210,7 +211,7 @@ func s3Backup(db *dbConfig, config *BackupConfig) { } // Delete old backup if config.prune { - err := utils.DeleteOldBackup(bucket, s3Path, config.backupRetention) + err := DeleteOldBackup(bucket, s3Path, config.backupRetention) if err != nil { utils.Fatal("Error deleting old backup from S3: %s ", err) } @@ -256,6 +257,41 @@ func sshBackup(db *dbConfig, config *BackupConfig) { //Delete temp deleteTemp() } +func ftpBackup(db *dbConfig, config *BackupConfig) { + utils.Info("Backup database to the remote FTP server") + //Backup database + BackupDatabase(db, config.backupFileName, disableCompression) + finalFileName := config.backupFileName + if config.encryption { + encryptBackup(config.backupFileName, config.passphrase) + finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg") + } + utils.Info("Uploading backup archive to the remote FTP 
server ... ") + utils.Info("Backup name is %s", finalFileName) + err := CopyToFTP(finalFileName, config.remotePath) + if err != nil { + utils.Fatal("Error uploading file to the remote FTP server: %s ", err) + + } + + //Delete backup file from tmp folder + err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName)) + if err != nil { + utils.Error("Error deleting file: %v", err) + + } + if config.prune { + //TODO: Delete old backup from remote server + utils.Info("Deleting old backup from a remote server is not implemented yet") + + } + + utils.Done("Uploading backup archive to the remote FTP server ... done ") + //Send notification + utils.NotifySuccess(finalFileName) + //Delete temp + deleteTemp() +} func encryptBackup(backupFileName, gpqPassphrase string) { err := Encrypt(filepath.Join(tmpPath, backupFileName), gpqPassphrase) diff --git a/pkg/config.go b/pkg/config.go index 942008e..c2136a9 100644 --- a/pkg/config.go +++ b/pkg/config.go @@ -44,6 +44,29 @@ type BackupConfig struct { storage string cronExpression string } +type FTPConfig struct { + host string + user string + password string + port string + remotePath string +} + +func initFtpConfig() *FTPConfig { + //Initialize backup configs + fConfig := FTPConfig{} + fConfig.host = os.Getenv("FTP_HOST_NAME") + fConfig.user = os.Getenv("FTP_USER") + fConfig.password = os.Getenv("FTP_PASSWORD") + fConfig.port = os.Getenv("FTP_PORT") + fConfig.remotePath = os.Getenv("REMOTE_PATH") + err := utils.CheckEnvVars(ftpVars) + if err != nil { + utils.Error("Please make sure all required environment variables for FTP are set") + utils.Fatal("Error checking environment variables: %s", err) + } + return &fConfig +} func initDbConfig(cmd *cobra.Command) *dbConfig { //Set env @@ -66,9 +89,9 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig { utils.SetEnv("STORAGE_PATH", storagePath) utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION") utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION") - + 
utils.GetEnv(cmd, "path", "REMOTE_PATH") //Get flag value and set env - remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH") + remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH") storage = utils.GetEnv(cmd, "storage", "STORAGE") backupRetention, _ := cmd.Flags().GetInt("keep-last") prune, _ := cmd.Flags().GetBool("prune") @@ -106,10 +129,11 @@ type RestoreConfig struct { func initRestoreConfig(cmd *cobra.Command) *RestoreConfig { utils.SetEnv("STORAGE_PATH", storagePath) + utils.GetEnv(cmd, "path", "REMOTE_PATH") //Get flag value and set env s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH") - remotePath := utils.GetEnv(cmd, "path", "SSH_REMOTE_PATH") + remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH") storage = utils.GetEnv(cmd, "storage", "STORAGE") file = utils.GetEnv(cmd, "file", "FILE_NAME") _, _ = cmd.Flags().GetString("mode") diff --git a/pkg/ftp.go b/pkg/ftp.go new file mode 100644 index 0000000..9ce9319 --- /dev/null +++ b/pkg/ftp.go @@ -0,0 +1,81 @@ +package pkg + +import ( + "fmt" + "github.com/jlaffaye/ftp" + "io" + "os" + "path/filepath" + "time" +) + +// initFtpClient initializes and authenticates an FTP client +func initFtpClient() (*ftp.ServerConn, error) { + ftpConfig := initFtpConfig() + ftpClient, err := ftp.Dial(fmt.Sprintf("%s:%s", ftpConfig.host, ftpConfig.port), ftp.DialWithTimeout(5*time.Second)) + if err != nil { + return nil, fmt.Errorf("failed to connect to FTP: %w", err) + } + + err = ftpClient.Login(ftpConfig.user, ftpConfig.password) + if err != nil { + return nil, fmt.Errorf("failed to log in to FTP: %w", err) + } + + return ftpClient, nil +} + +// CopyToFTP uploads a file to the remote FTP server +func CopyToFTP(fileName, remotePath string) (err error) { + ftpConfig := initFtpConfig() + ftpClient, err := initFtpClient() + if err != nil { + return err + } + defer ftpClient.Quit() + + filePath := filepath.Join(tmpPath, fileName) + file, err := os.Open(filePath) + if err != nil { + return 
fmt.Errorf("failed to open file %s: %w", fileName, err) + } + defer file.Close() + + remoteFilePath := filepath.Join(ftpConfig.remotePath, fileName) + err = ftpClient.Stor(remoteFilePath, file) + if err != nil { + return fmt.Errorf("failed to upload file %s: %w", fileName, err) + } + + return nil +} + +// CopyFromFTP downloads a file from the remote FTP server +func CopyFromFTP(fileName, remotePath string) (err error) { + ftpClient, err := initFtpClient() + if err != nil { + return err + } + defer ftpClient.Quit() + + remoteFilePath := filepath.Join(remotePath, fileName) + r, err := ftpClient.Retr(remoteFilePath) + if err != nil { + return fmt.Errorf("failed to retrieve file %s: %w", fileName, err) + } + defer r.Close() + + localFilePath := filepath.Join(tmpPath, fileName) + outFile, err := os.Create(localFilePath) + if err != nil { + return fmt.Errorf("failed to create local file %s: %w", fileName, err) + } + defer outFile.Close() + + _, err = io.Copy(outFile, r) + if err != nil { + return fmt.Errorf("failed to copy data to local file %s: %w", fileName, err) + } + + return nil +} diff --git a/pkg/restore.go b/pkg/restore.go index ca13d6a..970b527 100644 --- a/pkg/restore.go +++ b/pkg/restore.go @@ -30,7 +30,7 @@ func StartRestore(cmd *cobra.Command) { case "ssh": restoreFromRemote(dbConf, restoreConf.file, restoreConf.remotePath) case "ftp": - utils.Fatal("Restore from FTP is not yet supported") + restoreFromFTP(dbConf, restoreConf.file, restoreConf.remotePath) default: utils.Info("Restore database from local") copyToTmp(storagePath, restoreConf.file) @@ -40,7 +40,7 @@ func StartRestore(cmd *cobra.Command) { func restoreFromS3(db *dbConfig, file, bucket, s3Path string) { utils.Info("Restore database from s3") - err := utils.DownloadFile(tmpPath, file, bucket, s3Path) + err := DownloadFile(tmpPath, file, bucket, s3Path) if err != nil { utils.Fatal("Error download file from s3 %s %v ", file, err) } @@ -54,6 +54,14 @@ func restoreFromRemote(db *dbConfig, file, 
remotePath string) { } RestoreDatabase(db, file) } +func restoreFromFTP(db *dbConfig, file, remotePath string) { + utils.Info("Restore database from FTP server") + err := CopyFromFTP(file, remotePath) + if err != nil { + utils.Fatal("Error download file from FTP server: %s %v", filepath.Join(remotePath, file), err) + } + RestoreDatabase(db, file) +} // RestoreDatabase restore database func RestoreDatabase(db *dbConfig, file string) { @@ -93,11 +101,11 @@ func RestoreDatabase(db *dbConfig, file string) { testDatabaseConnection(db) utils.Info("Restoring database...") - extension := filepath.Ext(fmt.Sprintf("%s/%s", tmpPath, file)) + extension := filepath.Ext(file) // Restore from compressed file / .sql.gz if extension == ".gz" { - str := "zcat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName - _, err := exec.Command("bash", "-c", str).Output() + str := "zcat " + filepath.Join(tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName + _, err := exec.Command("sh", "-c", str).Output() if err != nil { utils.Fatal("Error, in restoring the database %v", err) } @@ -108,8 +116,8 @@ func RestoreDatabase(db *dbConfig, file string) { } else if extension == ".sql" { //Restore from sql file - str := "cat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName - _, err := exec.Command("bash", "-c", str).Output() + str := "cat " + filepath.Join(tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName + _, err := exec.Command("sh", "-c", str).Output() if err != nil { utils.Fatal("Error in restoring the database %v", err) } diff --git a/utils/s3.go b/pkg/s3.go similarity index 78% rename from utils/s3.go rename to pkg/s3.go index 57d5f8a..9a80c16 100644 --- a/utils/s3.go +++ b/pkg/s3.go @@ -1,10 +1,10 @@ 
-// Package utils / +// Package pkg /***** @author Jonas Kaninda @license MIT License @Copyright © 2024 Jonas Kaninda **/ -package utils +package pkg import ( "bytes" @@ -13,6 +13,7 @@ import ( "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/s3" "github.com/aws/aws-sdk-go/service/s3/s3manager" + "github.com/jkaninda/pg-bkup/utils" "net/http" "os" "path/filepath" @@ -32,20 +33,20 @@ func CreateSession() (*session.Session, error) { "AWS_REGION", } - endPoint := GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT") - accessKey := GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY") - secretKey := GetEnvVariable("AWS_SECRET_KEY", "SECRET_KEY") - _ = GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME") + endPoint := utils.GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT") + accessKey := utils.GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY") + secretKey := utils.GetEnvVariable("AWS_SECRET_KEY", "SECRET_KEY") + _ = utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME") region := os.Getenv("AWS_REGION") awsDisableSsl, err := strconv.ParseBool(os.Getenv("AWS_DISABLE_SSL")) if err != nil { - Fatal("Unable to parse AWS_DISABLE_SSL env var: %s", err) + utils.Fatal("Unable to parse AWS_DISABLE_SSL env var: %s", err) } - err = CheckEnvVars(awsVars) + err = utils.CheckEnvVars(awsVars) if err != nil { - Fatal("Error checking environment variables\n: %s", err) + utils.Fatal("Error checking environment variables\n: %s", err) } // Configure to use MinIO Server s3Config := &aws.Config{ @@ -105,10 +106,10 @@ func DownloadFile(destinationPath, key, bucket, prefix string) error { if err != nil { return err } - Info("Download backup from S3 storage...") + utils.Info("Download backup from S3 storage...") file, err := os.Create(filepath.Join(destinationPath, key)) if err != nil { - Error("Failed to create file", err) + utils.Error("Failed to create file", err) return err } defer file.Close() @@ -122,10 +123,10 @@ func DownloadFile(destinationPath, key, bucket, prefix string) 
error { Key: aws.String(objectKey), }) if err != nil { - Error("Failed to download file", err) + utils.Error("Failed to download file %s", key) return err } - Info("Backup downloaded: %s bytes size %s ", file.Name(), numBytes) + utils.Info("Backup downloaded: %s bytes size %s ", file.Name(), numBytes) return nil } @@ -155,18 +156,18 @@ func DeleteOldBackup(bucket, prefix string, retention int) error { Key: object.Key, }) if err != nil { - Info("Failed to delete object %s: %v", *object.Key, err) + utils.Info("Failed to delete object %s: %v", *object.Key, err) } else { - Info("Deleted object %s\n", *object.Key) + utils.Info("Deleted object %s\n", *object.Key) } } } return !lastPage }) if err != nil { - Error("Failed to list objects: %v", err) + utils.Error("Failed to list objects: %v", err) } - Info("Finished deleting old files.") + utils.Info("Finished deleting old files.") return nil } diff --git a/pkg/var.go b/pkg/var.go index 52dcf2e..2ec9f88 100644 --- a/pkg/var.go +++ b/pkg/var.go @@ -41,9 +41,15 @@ var targetDbConf *targetDbConfig // sshVars Required environment variables for SSH remote server storage var sshVars = []string{ "SSH_USER", - "SSH_REMOTE_PATH", "SSH_HOST_NAME", "SSH_PORT", + "REMOTE_PATH", +} +var ftpVars = []string{ + "FTP_HOST_NAME", + "FTP_USER", + "FTP_PASSWORD", + "FTP_PORT", } // AwsVars Required environment variables for AWS S3 storage