refactor: rework code to meet all golangci-lint requirements

Author: Jonas Kaninda
Date: 2024-11-19 02:54:31 +01:00
parent d97a0aafea
commit 63101ae84f
20 changed files with 413 additions and 313 deletions

.github/workflows/lint.yml (new file)

@@ -0,0 +1,23 @@
name: Lint
on:
push:
pull_request:
jobs:
lint:
name: Run on Ubuntu
runs-on: ubuntu-latest
steps:
- name: Clone the code
uses: actions/checkout@v4
- name: Setup Go
uses: actions/setup-go@v5
with:
go-version: '~1.22'
- name: Run linter
uses: golangci/golangci-lint-action@v6
with:
version: v1.61

.golangci.yml (new file)

@@ -0,0 +1,44 @@
run:
timeout: 5m
allow-parallel-runners: true
issues:
# don't skip warning about doc comments
# don't exclude the default set of lint
exclude-use-default: false
# restore some of the defaults
# (fill in the rest as needed)
exclude-rules:
- path: "internal/*"
linters:
- dupl
- lll
- goimports
linters:
disable-all: true
enable:
- dupl
- errcheck
- copyloopvar
- ginkgolinter
- goconst
- gocyclo
- gofmt
- gosimple
- govet
- ineffassign
# - lll
- misspell
- nakedret
- prealloc
- revive
- staticcheck
- typecheck
- unconvert
- unparam
- unused
linters-settings:
revive:
rules:
- name: comment-spacings
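
Most of the mechanical changes in the diffs below follow directly from these linters. As a hedged illustration (a standalone example, not code from this repository), errcheck rejects a silently ignored error from a deferred Close, and revive's comment-spacings rule wants a space after //:

package example

import (
	"log"
	"os"
)

// writeReport demonstrates the errcheck-friendly pattern used throughout this
// commit: the error returned by Close is handled inside the deferred function
// instead of being discarded with a bare defer f.Close().
func writeReport(path string, data []byte) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	defer func() {
		// Checking (or at least logging) the Close error satisfies errcheck.
		if cerr := f.Close(); cerr != nil {
			log.Printf("closing %s: %v", path, cerr)
		}
	}()

	_, err = f.Write(data)
	return err
}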


@@ -1,13 +1,9 @@
// Package cmd / // Package cmd /
/*****
@author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda
**/
package cmd package cmd
import ( import (
"github.com/jkaninda/pg-bkup/internal" "github.com/jkaninda/pg-bkup/internal"
"github.com/jkaninda/pg-bkup/pkg/logger"
"github.com/jkaninda/pg-bkup/utils" "github.com/jkaninda/pg-bkup/utils"
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )
@@ -20,14 +16,14 @@ var BackupCmd = &cobra.Command{
if len(args) == 0 { if len(args) == 0 {
internal.StartBackup(cmd) internal.StartBackup(cmd)
} else { } else {
utils.Fatal(`"backup" accepts no argument %q`, args) logger.Fatal(`"backup" accepts no argument %q`, args)
} }
}, },
} }
func init() { func init() {
//Backup // Backup
BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Define storage: local, s3, ssh, ftp") BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Define storage: local, s3, ssh, ftp")
BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`") BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
BackupCmd.PersistentFlags().StringP("cron-expression", "", "", "Backup cron expression") BackupCmd.PersistentFlags().StringP("cron-expression", "", "", "Backup cron expression")


@@ -1,5 +1,6 @@
// Package cmd / // Package cmd /
/***** /*
****
@author Jonas Kaninda @author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT> @license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda @Copyright © 2024 Jonas Kaninda
@@ -8,7 +9,7 @@ package cmd
import ( import (
"github.com/jkaninda/pg-bkup/internal" "github.com/jkaninda/pg-bkup/internal"
"github.com/jkaninda/pg-bkup/utils" "github.com/jkaninda/pg-bkup/pkg/logger"
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )
@@ -19,7 +20,7 @@ var MigrateCmd = &cobra.Command{
if len(args) == 0 { if len(args) == 0 {
internal.StartMigration(cmd) internal.StartMigration(cmd)
} else { } else {
utils.Fatal(`"migrate" accepts no argument %q`, args) logger.Fatal(`"migrate" accepts no argument %q`, args)
} }


@@ -1,5 +1,6 @@
// Package cmd / // Package cmd /
/***** /*
****
@author Jonas Kaninda @author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT> @license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda @Copyright © 2024 Jonas Kaninda
@@ -8,6 +9,7 @@ package cmd
import ( import (
"github.com/jkaninda/pg-bkup/internal" "github.com/jkaninda/pg-bkup/internal"
"github.com/jkaninda/pg-bkup/pkg/logger"
"github.com/jkaninda/pg-bkup/utils" "github.com/jkaninda/pg-bkup/utils"
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )
@@ -20,7 +22,7 @@ var RestoreCmd = &cobra.Command{
if len(args) == 0 { if len(args) == 0 {
internal.StartRestore(cmd) internal.StartRestore(cmd)
} else { } else {
utils.Fatal(`"restore" accepts no argument %q`, args) logger.Fatal(`"restore" accepts no argument %q`, args)
} }
@@ -28,7 +30,7 @@ var RestoreCmd = &cobra.Command{
} }
func init() { func init() {
//Restore // Restore
RestoreCmd.PersistentFlags().StringP("file", "f", "", "File name of database") RestoreCmd.PersistentFlags().StringP("file", "f", "", "File name of database")
RestoreCmd.PersistentFlags().StringP("storage", "s", "local", "Define storage: local, s3, ssh, ftp") RestoreCmd.PersistentFlags().StringP("storage", "s", "local", "Define storage: local, s3, ssh, ftp")
RestoreCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`") RestoreCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")


@@ -1,5 +1,7 @@
// Package cmd / // Package cmd /
/***** /*
*
***
@author Jonas Kaninda @author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT> @license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda @Copyright © 2024 Jonas Kaninda


@@ -1,5 +1,6 @@
// Package cmd / // Package cmd /
/***** /*
****
@author Jonas Kaninda @author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT> @license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda @Copyright © 2024 Jonas Kaninda


@@ -1,9 +1,3 @@
// Package internal /
/*****
@author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda
**/
package internal package internal
import ( import (
@@ -13,6 +7,7 @@ import (
"github.com/jkaninda/go-storage/pkg/local" "github.com/jkaninda/go-storage/pkg/local"
"github.com/jkaninda/go-storage/pkg/s3" "github.com/jkaninda/go-storage/pkg/s3"
"github.com/jkaninda/go-storage/pkg/ssh" "github.com/jkaninda/go-storage/pkg/ssh"
"github.com/jkaninda/pg-bkup/pkg/logger"
"github.com/jkaninda/pg-bkup/utils" "github.com/jkaninda/pg-bkup/utils"
"github.com/robfig/cron/v3" "github.com/robfig/cron/v3"
"github.com/spf13/cobra" "github.com/spf13/cobra"
@@ -26,9 +21,9 @@ import (
func StartBackup(cmd *cobra.Command) { func StartBackup(cmd *cobra.Command) {
intro() intro()
//Initialize backup configs // Initialize backup configs
config := initBackupConfig(cmd) config := initBackupConfig(cmd)
//Load backup configuration file // Load backup configuration file
configFile, err := loadConfigFile() configFile, err := loadConfigFile()
if err != nil { if err != nil {
dbConf = initDbConfig(cmd) dbConf = initDbConfig(cmd)
@@ -38,7 +33,7 @@ func StartBackup(cmd *cobra.Command) {
if utils.IsValidCronExpression(config.cronExpression) { if utils.IsValidCronExpression(config.cronExpression) {
scheduledMode(dbConf, config) scheduledMode(dbConf, config)
} else { } else {
utils.Fatal("Cron expression is not valid: %s", config.cronExpression) logger.Fatal("Cron expression is not valid: %s", config.cronExpression)
} }
} }
} else { } else {
@@ -49,22 +44,22 @@ func StartBackup(cmd *cobra.Command) {
// scheduledMode Runs backup in scheduled mode // scheduledMode Runs backup in scheduled mode
func scheduledMode(db *dbConfig, config *BackupConfig) { func scheduledMode(db *dbConfig, config *BackupConfig) {
utils.Info("Running in Scheduled mode") logger.Info("Running in Scheduled mode")
utils.Info("Backup cron expression: %s", config.cronExpression) logger.Info("Backup cron expression: %s", config.cronExpression)
utils.Info("The next scheduled time is: %v", utils.CronNextTime(config.cronExpression).Format(timeFormat)) logger.Info("The next scheduled time is: %v", utils.CronNextTime(config.cronExpression).Format(timeFormat))
utils.Info("Storage type %s ", config.storage) logger.Info("Storage type %s ", config.storage)
//Test backup // Test backup
utils.Info("Testing backup configurations...") logger.Info("Testing backup configurations...")
BackupTask(db, config) BackupTask(db, config)
utils.Info("Testing backup configurations...done") logger.Info("Testing backup configurations...done")
utils.Info("Creating backup job...") logger.Info("Creating backup job...")
// Create a new cron instance // Create a new cron instance
c := cron.New() c := cron.New()
_, err := c.AddFunc(config.cronExpression, func() { _, err := c.AddFunc(config.cronExpression, func() {
BackupTask(db, config) BackupTask(db, config)
utils.Info("Next backup time is: %v", utils.CronNextTime(config.cronExpression).Format(timeFormat)) logger.Info("Next backup time is: %v", utils.CronNextTime(config.cronExpression).Format(timeFormat))
}) })
if err != nil { if err != nil {
@@ -72,8 +67,8 @@ func scheduledMode(db *dbConfig, config *BackupConfig) {
} }
// Start the cron scheduler // Start the cron scheduler
c.Start() c.Start()
utils.Info("Creating backup job...done") logger.Info("Creating backup job...done")
utils.Info("Backup job started") logger.Info("Backup job started")
defer c.Stop() defer c.Stop()
select {} select {}
} }
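
The scheduled mode above is built on github.com/robfig/cron/v3. A minimal, self-contained sketch of that pattern (illustrative task and schedule, not the project's actual configuration):

package main

import (
	"log"

	"github.com/robfig/cron/v3"
)

func main() {
	// Run the task once up front so configuration errors surface immediately,
	// mirroring the "Testing backup configurations..." step above.
	backupTask()

	c := cron.New()
	// AddFunc registers the job against a standard cron expression.
	if _, err := c.AddFunc("0 1 * * *", backupTask); err != nil {
		log.Fatalf("invalid cron expression: %v", err)
	}

	c.Start()
	defer c.Stop()

	// Block forever; the scheduler fires backupTask in its own goroutine.
	select {}
}

func backupTask() {
	log.Println("running backup task...")
}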
@@ -81,7 +76,7 @@ func scheduledMode(db *dbConfig, config *BackupConfig) {
// multiBackupTask backup multi database // multiBackupTask backup multi database
func multiBackupTask(databases []Database, bkConfig *BackupConfig) { func multiBackupTask(databases []Database, bkConfig *BackupConfig) {
for _, db := range databases { for _, db := range databases {
//Check if path is defined in config file // Check if path is defined in config file
if db.Path != "" { if db.Path != "" {
bkConfig.remotePath = db.Path bkConfig.remotePath = db.Path
} }
@@ -91,8 +86,8 @@ func multiBackupTask(databases []Database, bkConfig *BackupConfig) {
// BackupTask backups database // BackupTask backups database
func BackupTask(db *dbConfig, config *BackupConfig) { func BackupTask(db *dbConfig, config *BackupConfig) {
utils.Info("Starting backup task...") logger.Info("Starting backup task...")
//Generate file name // Generate file name
backupFileName := fmt.Sprintf("%s_%s.sql.gz", db.dbName, time.Now().Format("20060102_150405")) backupFileName := fmt.Sprintf("%s_%s.sql.gz", db.dbName, time.Now().Format("20060102_150405"))
if config.disableCompression { if config.disableCompression {
backupFileName = fmt.Sprintf("%s_%s.sql", db.dbName, time.Now().Format("20060102_150405")) backupFileName = fmt.Sprintf("%s_%s.sql", db.dbName, time.Now().Format("20060102_150405"))
@@ -107,18 +102,17 @@ func BackupTask(db *dbConfig, config *BackupConfig) {
sshBackup(db, config) sshBackup(db, config)
case "ftp", "FTP": case "ftp", "FTP":
ftpBackup(db, config) ftpBackup(db, config)
//utils.Fatal("Not supported storage type: %s", config.storage)
default: default:
localBackup(db, config) localBackup(db, config)
} }
} }
func startMultiBackup(bkConfig *BackupConfig, configFile string) { func startMultiBackup(bkConfig *BackupConfig, configFile string) {
utils.Info("Starting backup task...") logger.Info("Starting backup task...")
conf, err := readConf(configFile) conf, err := readConf(configFile)
if err != nil { if err != nil {
utils.Fatal("Error reading config file: %s", err) logger.Fatal("Error reading config file: %s", err)
} }
//Check if cronExpression is defined in config file // Check if cronExpression is defined in config file
if conf.CronExpression != "" { if conf.CronExpression != "" {
bkConfig.cronExpression = conf.CronExpression bkConfig.cronExpression = conf.CronExpression
} }
@@ -128,22 +122,22 @@ func startMultiBackup(bkConfig *BackupConfig, configFile string) {
} else { } else {
// Check if cronExpression is valid // Check if cronExpression is valid
if utils.IsValidCronExpression(bkConfig.cronExpression) { if utils.IsValidCronExpression(bkConfig.cronExpression) {
utils.Info("Running backup in Scheduled mode") logger.Info("Running backup in Scheduled mode")
utils.Info("Backup cron expression: %s", bkConfig.cronExpression) logger.Info("Backup cron expression: %s", bkConfig.cronExpression)
utils.Info("The next scheduled time is: %v", utils.CronNextTime(bkConfig.cronExpression).Format(timeFormat)) logger.Info("The next scheduled time is: %v", utils.CronNextTime(bkConfig.cronExpression).Format(timeFormat))
utils.Info("Storage type %s ", bkConfig.storage) logger.Info("Storage type %s ", bkConfig.storage)
//Test backup // Test backup
utils.Info("Testing backup configurations...") logger.Info("Testing backup configurations...")
multiBackupTask(conf.Databases, bkConfig) multiBackupTask(conf.Databases, bkConfig)
utils.Info("Testing backup configurations...done") logger.Info("Testing backup configurations...done")
utils.Info("Creating backup job...") logger.Info("Creating backup job...")
// Create a new cron instance // Create a new cron instance
c := cron.New() c := cron.New()
_, err := c.AddFunc(bkConfig.cronExpression, func() { _, err := c.AddFunc(bkConfig.cronExpression, func() {
multiBackupTask(conf.Databases, bkConfig) multiBackupTask(conf.Databases, bkConfig)
utils.Info("Next backup time is: %v", utils.CronNextTime(bkConfig.cronExpression).Format(timeFormat)) logger.Info("Next backup time is: %v", utils.CronNextTime(bkConfig.cronExpression).Format(timeFormat))
}) })
if err != nil { if err != nil {
@@ -151,13 +145,13 @@ func startMultiBackup(bkConfig *BackupConfig, configFile string) {
} }
// Start the cron scheduler // Start the cron scheduler
c.Start() c.Start()
utils.Info("Creating backup job...done") logger.Info("Creating backup job...done")
utils.Info("Backup job started") logger.Info("Backup job started")
defer c.Stop() defer c.Stop()
select {} select {}
} else { } else {
utils.Fatal("Cron expression is not valid: %s", bkConfig.cronExpression) logger.Fatal("Cron expression is not valid: %s", bkConfig.cronExpression)
} }
} }
@@ -168,7 +162,7 @@ func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool
storagePath = os.Getenv("STORAGE_PATH") storagePath = os.Getenv("STORAGE_PATH")
utils.Info("Starting database backup...") logger.Info("Starting database backup...")
err := os.Setenv("PGPASSWORD", db.dbPassword) err := os.Setenv("PGPASSWORD", db.dbPassword)
if err != nil { if err != nil {
@@ -176,7 +170,7 @@ func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool
} }
testDatabaseConnection(db) testDatabaseConnection(db)
// Backup Database database // Backup Database database
utils.Info("Backing up database...") logger.Info("Backing up database...")
// Verify is compression is disabled // Verify is compression is disabled
if disableCompression { if disableCompression {
@@ -196,7 +190,13 @@ func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }
defer file.Close() defer func(file *os.File) {
err := file.Close()
if err != nil {
return
}
}(file)
_, err = file.Write(output) _, err = file.Write(output)
if err != nil { if err != nil {
@@ -219,7 +219,10 @@ func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool
gzipCmd.Stdin = stdout gzipCmd.Stdin = stdout
// save output // save output
gzipCmd.Stdout, err = os.Create(filepath.Join(tmpPath, backupFileName)) gzipCmd.Stdout, err = os.Create(filepath.Join(tmpPath, backupFileName))
gzipCmd.Start() err2 := gzipCmd.Start()
if err2 != nil {
return
}
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }
@@ -231,11 +234,11 @@ func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool
} }
} }
utils.Info("Database has been backed up") logger.Info("Database has been backed up")
} }
func localBackup(db *dbConfig, config *BackupConfig) { func localBackup(db *dbConfig, config *BackupConfig) {
utils.Info("Backup database to local storage") logger.Info("Backup database to local storage")
startTime = time.Now().Format(utils.TimeFormat()) startTime = time.Now().Format(utils.TimeFormat())
BackupDatabase(db, config.backupFileName, disableCompression) BackupDatabase(db, config.backupFileName, disableCompression)
finalFileName := config.backupFileName finalFileName := config.backupFileName
@@ -245,20 +248,20 @@ func localBackup(db *dbConfig, config *BackupConfig) {
} }
fileInfo, err := os.Stat(filepath.Join(tmpPath, finalFileName)) fileInfo, err := os.Stat(filepath.Join(tmpPath, finalFileName))
if err != nil { if err != nil {
utils.Error("Error: %s", err) logger.Error("Error: %s", err)
} }
backupSize = fileInfo.Size() backupSize = fileInfo.Size()
utils.Info("Backup name is %s", finalFileName) logger.Info("Backup name is %s", finalFileName)
localStorage := local.NewStorage(local.Config{ localStorage := local.NewStorage(local.Config{
LocalPath: tmpPath, LocalPath: tmpPath,
RemotePath: storagePath, RemotePath: storagePath,
}) })
err = localStorage.Copy(finalFileName) err = localStorage.Copy(finalFileName)
if err != nil { if err != nil {
utils.Fatal("Error copying backup file: %s", err) logger.Fatal("Error copying backup file: %s", err)
} }
utils.Info("Backup saved in %s", filepath.Join(storagePath, finalFileName)) logger.Info("Backup saved in %s", filepath.Join(storagePath, finalFileName))
//Send notification // Send notification
utils.NotifySuccess(&utils.NotificationData{ utils.NotifySuccess(&utils.NotificationData{
File: finalFileName, File: finalFileName,
BackupSize: backupSize, BackupSize: backupSize,
@@ -268,36 +271,36 @@ func localBackup(db *dbConfig, config *BackupConfig) {
StartTime: startTime, StartTime: startTime,
EndTime: time.Now().Format(utils.TimeFormat()), EndTime: time.Now().Format(utils.TimeFormat()),
}) })
//Delete old backup // Delete old backup
if config.prune { if config.prune {
err = localStorage.Prune(config.backupRetention) err = localStorage.Prune(config.backupRetention)
if err != nil { if err != nil {
utils.Fatal("Error deleting old backup from %s storage: %s ", config.storage, err) logger.Fatal("Error deleting old backup from %s storage: %s ", config.storage, err)
} }
} }
//Delete temp // Delete temp
deleteTemp() deleteTemp()
utils.Info("Backup completed successfully") logger.Info("Backup completed successfully")
} }
func s3Backup(db *dbConfig, config *BackupConfig) { func s3Backup(db *dbConfig, config *BackupConfig) {
utils.Info("Backup database to s3 storage") logger.Info("Backup database to s3 storage")
startTime = time.Now().Format(utils.TimeFormat()) startTime = time.Now().Format(utils.TimeFormat())
//Backup database // Backup database
BackupDatabase(db, config.backupFileName, disableCompression) BackupDatabase(db, config.backupFileName, disableCompression)
finalFileName := config.backupFileName finalFileName := config.backupFileName
if config.encryption { if config.encryption {
encryptBackup(config) encryptBackup(config)
finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg") finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
} }
utils.Info("Uploading backup archive to remote storage S3 ... ") logger.Info("Uploading backup archive to remote storage S3 ... ")
awsConfig := initAWSConfig() awsConfig := initAWSConfig()
if config.remotePath == "" { if config.remotePath == "" {
config.remotePath = awsConfig.remotePath config.remotePath = awsConfig.remotePath
} }
utils.Info("Backup name is %s", finalFileName) logger.Info("Backup name is %s", finalFileName)
s3Storage, err := s3.NewStorage(s3.Config{ s3Storage, err := s3.NewStorage(s3.Config{
Endpoint: awsConfig.endpoint, Endpoint: awsConfig.endpoint,
Bucket: awsConfig.bucket, Bucket: awsConfig.bucket,
@@ -310,20 +313,20 @@ func s3Backup(db *dbConfig, config *BackupConfig) {
LocalPath: tmpPath, LocalPath: tmpPath,
}) })
if err != nil { if err != nil {
utils.Fatal("Error creating s3 storage: %s", err) logger.Fatal("Error creating s3 storage: %s", err)
} }
err = s3Storage.Copy(finalFileName) err = s3Storage.Copy(finalFileName)
if err != nil { if err != nil {
utils.Fatal("Error copying backup file: %s", err) logger.Fatal("Error copying backup file: %s", err)
} }
//Get backup info // Get backup info
fileInfo, err := os.Stat(filepath.Join(tmpPath, finalFileName)) fileInfo, err := os.Stat(filepath.Join(tmpPath, finalFileName))
if err != nil { if err != nil {
utils.Error("Error: %s", err) logger.Error("Error: %s", err)
} }
backupSize = fileInfo.Size() backupSize = fileInfo.Size()
//Delete backup file from tmp folder // Delete backup file from tmp folder
err = utils.DeleteFile(filepath.Join(tmpPath, config.backupFileName)) err = utils.DeleteFile(filepath.Join(tmpPath, config.backupFileName))
if err != nil { if err != nil {
fmt.Println("Error deleting file: ", err) fmt.Println("Error deleting file: ", err)
@@ -333,12 +336,12 @@ func s3Backup(db *dbConfig, config *BackupConfig) {
if config.prune { if config.prune {
err := s3Storage.Prune(config.backupRetention) err := s3Storage.Prune(config.backupRetention)
if err != nil { if err != nil {
utils.Fatal("Error deleting old backup from %s storage: %s ", config.storage, err) logger.Fatal("Error deleting old backup from %s storage: %s ", config.storage, err)
} }
} }
utils.Info("Backup saved in %s", filepath.Join(config.remotePath, finalFileName)) logger.Info("Backup saved in %s", filepath.Join(config.remotePath, finalFileName))
utils.Info("Uploading backup archive to remote storage S3 ... done ") logger.Info("Uploading backup archive to remote storage S3 ... done ")
//Send notification // Send notification
utils.NotifySuccess(&utils.NotificationData{ utils.NotifySuccess(&utils.NotificationData{
File: finalFileName, File: finalFileName,
BackupSize: backupSize, BackupSize: backupSize,
@@ -348,26 +351,26 @@ func s3Backup(db *dbConfig, config *BackupConfig) {
StartTime: startTime, StartTime: startTime,
EndTime: time.Now().Format(utils.TimeFormat()), EndTime: time.Now().Format(utils.TimeFormat()),
}) })
//Delete temp // Delete temp
deleteTemp() deleteTemp()
utils.Info("Backup completed successfully") logger.Info("Backup completed successfully")
} }
func sshBackup(db *dbConfig, config *BackupConfig) { func sshBackup(db *dbConfig, config *BackupConfig) {
utils.Info("Backup database to Remote server") logger.Info("Backup database to Remote server")
startTime = time.Now().Format(utils.TimeFormat()) startTime = time.Now().Format(utils.TimeFormat())
//Backup database // Backup database
BackupDatabase(db, config.backupFileName, disableCompression) BackupDatabase(db, config.backupFileName, disableCompression)
finalFileName := config.backupFileName finalFileName := config.backupFileName
if config.encryption { if config.encryption {
encryptBackup(config) encryptBackup(config)
finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg") finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
} }
utils.Info("Uploading backup archive to remote storage ... ") logger.Info("Uploading backup archive to remote storage ... ")
utils.Info("Backup name is %s", finalFileName) logger.Info("Backup name is %s", finalFileName)
sshConfig, err := loadSSHConfig() sshConfig, err := loadSSHConfig()
if err != nil { if err != nil {
utils.Fatal("Error loading ssh config: %s", err) logger.Fatal("Error loading ssh config: %s", err)
} }
sshStorage, err := ssh.NewStorage(ssh.Config{ sshStorage, err := ssh.NewStorage(ssh.Config{
@@ -379,35 +382,35 @@ func sshBackup(db *dbConfig, config *BackupConfig) {
LocalPath: tmpPath, LocalPath: tmpPath,
}) })
if err != nil { if err != nil {
utils.Fatal("Error creating SSH storage: %s", err) logger.Fatal("Error creating SSH storage: %s", err)
} }
err = sshStorage.Copy(finalFileName) err = sshStorage.Copy(finalFileName)
if err != nil { if err != nil {
utils.Fatal("Error copying backup file: %s", err) logger.Fatal("Error copying backup file: %s", err)
} }
//Get backup info // Get backup info
fileInfo, err := os.Stat(filepath.Join(tmpPath, finalFileName)) fileInfo, err := os.Stat(filepath.Join(tmpPath, finalFileName))
if err != nil { if err != nil {
utils.Error("Error: %s", err) logger.Error("Error: %s", err)
} }
backupSize = fileInfo.Size() backupSize = fileInfo.Size()
utils.Info("Backup saved in %s", filepath.Join(config.remotePath, finalFileName)) logger.Info("Backup saved in %s", filepath.Join(config.remotePath, finalFileName))
//Delete backup file from tmp folder // Delete backup file from tmp folder
err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName)) err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName))
if err != nil { if err != nil {
utils.Error("Error deleting file: %v", err) logger.Error("Error deleting file: %v", err)
} }
if config.prune { if config.prune {
err := sshStorage.Prune(config.backupRetention) err := sshStorage.Prune(config.backupRetention)
if err != nil { if err != nil {
utils.Fatal("Error deleting old backup from %s storage: %s ", config.storage, err) logger.Fatal("Error deleting old backup from %s storage: %s ", config.storage, err)
} }
} }
utils.Info("Uploading backup archive to remote storage ... done ") logger.Info("Uploading backup archive to remote storage ... done ")
//Send notification // Send notification
utils.NotifySuccess(&utils.NotificationData{ utils.NotifySuccess(&utils.NotificationData{
File: finalFileName, File: finalFileName,
BackupSize: backupSize, BackupSize: backupSize,
@@ -417,24 +420,24 @@ func sshBackup(db *dbConfig, config *BackupConfig) {
StartTime: startTime, StartTime: startTime,
EndTime: time.Now().Format(utils.TimeFormat()), EndTime: time.Now().Format(utils.TimeFormat()),
}) })
//Delete temp // Delete temp
deleteTemp() deleteTemp()
utils.Info("Backup completed successfully") logger.Info("Backup completed successfully")
} }
func ftpBackup(db *dbConfig, config *BackupConfig) { func ftpBackup(db *dbConfig, config *BackupConfig) {
utils.Info("Backup database to the remote FTP server") logger.Info("Backup database to the remote FTP server")
startTime = time.Now().Format(utils.TimeFormat()) startTime = time.Now().Format(utils.TimeFormat())
//Backup database // Backup database
BackupDatabase(db, config.backupFileName, disableCompression) BackupDatabase(db, config.backupFileName, disableCompression)
finalFileName := config.backupFileName finalFileName := config.backupFileName
if config.encryption { if config.encryption {
encryptBackup(config) encryptBackup(config)
finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg") finalFileName = fmt.Sprintf("%s.%s", config.backupFileName, "gpg")
} }
utils.Info("Uploading backup archive to the remote FTP server ... ") logger.Info("Uploading backup archive to the remote FTP server ... ")
utils.Info("Backup name is %s", finalFileName) logger.Info("Backup name is %s", finalFileName)
ftpConfig := loadFtpConfig() ftpConfig := loadFtpConfig()
ftpStorage, err := ftp.NewStorage(ftp.Config{ ftpStorage, err := ftp.NewStorage(ftp.Config{
Host: ftpConfig.host, Host: ftpConfig.host,
@@ -445,36 +448,36 @@ func ftpBackup(db *dbConfig, config *BackupConfig) {
LocalPath: tmpPath, LocalPath: tmpPath,
}) })
if err != nil { if err != nil {
utils.Fatal("Error creating SSH storage: %s", err) logger.Fatal("Error creating SSH storage: %s", err)
} }
err = ftpStorage.Copy(finalFileName) err = ftpStorage.Copy(finalFileName)
if err != nil { if err != nil {
utils.Fatal("Error copying backup file: %s", err) logger.Fatal("Error copying backup file: %s", err)
} }
utils.Info("Backup saved in %s", filepath.Join(config.remotePath, finalFileName)) logger.Info("Backup saved in %s", filepath.Join(config.remotePath, finalFileName))
//Get backup info // Get backup info
fileInfo, err := os.Stat(filepath.Join(tmpPath, finalFileName)) fileInfo, err := os.Stat(filepath.Join(tmpPath, finalFileName))
if err != nil { if err != nil {
utils.Error("Error: %s", err) logger.Error("Error: %s", err)
} }
backupSize = fileInfo.Size() backupSize = fileInfo.Size()
//Delete backup file from tmp folder // Delete backup file from tmp folder
err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName)) err = utils.DeleteFile(filepath.Join(tmpPath, finalFileName))
if err != nil { if err != nil {
utils.Error("Error deleting file: %v", err) logger.Error("Error deleting file: %v", err)
} }
if config.prune { if config.prune {
err := ftpStorage.Prune(config.backupRetention) err := ftpStorage.Prune(config.backupRetention)
if err != nil { if err != nil {
utils.Fatal("Error deleting old backup from %s storage: %s ", config.storage, err) logger.Fatal("Error deleting old backup from %s storage: %s ", config.storage, err)
} }
} }
utils.Info("Uploading backup archive to the remote FTP server ... done ") logger.Info("Uploading backup archive to the remote FTP server ... done ")
//Send notification // Send notification
utils.NotifySuccess(&utils.NotificationData{ utils.NotifySuccess(&utils.NotificationData{
File: finalFileName, File: finalFileName,
BackupSize: backupSize, BackupSize: backupSize,
@@ -484,36 +487,36 @@ func ftpBackup(db *dbConfig, config *BackupConfig) {
StartTime: startTime, StartTime: startTime,
EndTime: time.Now().Format(utils.TimeFormat()), EndTime: time.Now().Format(utils.TimeFormat()),
}) })
//Delete temp // Delete temp
deleteTemp() deleteTemp()
utils.Info("Backup completed successfully") logger.Info("Backup completed successfully")
} }
func encryptBackup(config *BackupConfig) { func encryptBackup(config *BackupConfig) {
backupFile, err := os.ReadFile(filepath.Join(tmpPath, config.backupFileName)) backupFile, err := os.ReadFile(filepath.Join(tmpPath, config.backupFileName))
outputFile := fmt.Sprintf("%s.%s", filepath.Join(tmpPath, config.backupFileName), gpgExtension) outputFile := fmt.Sprintf("%s.%s", filepath.Join(tmpPath, config.backupFileName), gpgExtension)
if err != nil { if err != nil {
utils.Fatal("Error reading backup file: %s ", err) logger.Fatal("Error reading backup file: %s ", err)
} }
if config.usingKey { if config.usingKey {
utils.Info("Encrypting backup using public key...") logger.Info("Encrypting backup using public key...")
pubKey, err := os.ReadFile(config.publicKey) pubKey, err := os.ReadFile(config.publicKey)
if err != nil { if err != nil {
utils.Fatal("Error reading public key: %s ", err) logger.Fatal("Error reading public key: %s ", err)
} }
err = encryptor.EncryptWithPublicKey(backupFile, fmt.Sprintf("%s.%s", filepath.Join(tmpPath, config.backupFileName), gpgExtension), pubKey) err = encryptor.EncryptWithPublicKey(backupFile, fmt.Sprintf("%s.%s", filepath.Join(tmpPath, config.backupFileName), gpgExtension), pubKey)
if err != nil { if err != nil {
utils.Fatal("Error encrypting backup file: %v ", err) logger.Fatal("Error encrypting backup file: %v ", err)
} }
utils.Info("Encrypting backup using public key...done") logger.Info("Encrypting backup using public key...done")
} else if config.passphrase != "" { } else if config.passphrase != "" {
utils.Info("Encrypting backup using passphrase...") logger.Info("Encrypting backup using passphrase...")
err := encryptor.Encrypt(backupFile, outputFile, config.passphrase) err := encryptor.Encrypt(backupFile, outputFile, config.passphrase)
if err != nil { if err != nil {
utils.Fatal("error during encrypting backup %v", err) logger.Fatal("error during encrypting backup %v", err)
} }
utils.Info("Encrypting backup using passphrase...done") logger.Info("Encrypting backup using passphrase...done")
} }


@@ -1,5 +1,6 @@
// Package internal / // Package internal /
/***** /*
****
@author Jonas Kaninda @author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT> @license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda @Copyright © 2024 Jonas Kaninda
@@ -8,6 +9,7 @@ package internal
import ( import (
"fmt" "fmt"
"github.com/jkaninda/pg-bkup/pkg/logger"
"github.com/jkaninda/pg-bkup/utils" "github.com/jkaninda/pg-bkup/utils"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"os" "os"
@@ -86,7 +88,7 @@ type AWSConfig struct {
} }
func initDbConfig(cmd *cobra.Command) *dbConfig { func initDbConfig(cmd *cobra.Command) *dbConfig {
//Set env // Set env
utils.GetEnv(cmd, "dbname", "DB_NAME") utils.GetEnv(cmd, "dbname", "DB_NAME")
dConf := dbConfig{} dConf := dbConfig{}
dConf.dbHost = os.Getenv("DB_HOST") dConf.dbHost = os.Getenv("DB_HOST")
@@ -97,8 +99,8 @@ func initDbConfig(cmd *cobra.Command) *dbConfig {
err := utils.CheckEnvVars(dbHVars) err := utils.CheckEnvVars(dbHVars)
if err != nil { if err != nil {
utils.Error("Please make sure all required environment variables for database are set") logger.Error("Please make sure all required environment variables for database are set")
utils.Fatal("Error checking environment variables: %s", err) logger.Fatal("Error checking environment variables: %s", err)
} }
return &dConf return &dConf
} }
@@ -131,7 +133,7 @@ func loadSSHConfig() (*SSHConfig, error) {
}, nil }, nil
} }
func loadFtpConfig() *FTPConfig { func loadFtpConfig() *FTPConfig {
//Initialize data configs // Initialize data configs
fConfig := FTPConfig{} fConfig := FTPConfig{}
fConfig.host = utils.GetEnvVariable("FTP_HOST", "FTP_HOST_NAME") fConfig.host = utils.GetEnvVariable("FTP_HOST", "FTP_HOST_NAME")
fConfig.user = os.Getenv("FTP_USER") fConfig.user = os.Getenv("FTP_USER")
@@ -140,13 +142,13 @@ func loadFtpConfig() *FTPConfig {
fConfig.remotePath = os.Getenv("REMOTE_PATH") fConfig.remotePath = os.Getenv("REMOTE_PATH")
err := utils.CheckEnvVars(ftpVars) err := utils.CheckEnvVars(ftpVars)
if err != nil { if err != nil {
utils.Error("Please make sure all required environment variables for FTP are set") logger.Error("Please make sure all required environment variables for FTP are set")
utils.Fatal("Error missing environment variables: %s", err) logger.Fatal("Error missing environment variables: %s", err)
} }
return &fConfig return &fConfig
} }
func initAWSConfig() *AWSConfig { func initAWSConfig() *AWSConfig {
//Initialize AWS configs // Initialize AWS configs
aConfig := AWSConfig{} aConfig := AWSConfig{}
aConfig.endpoint = utils.GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT") aConfig.endpoint = utils.GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT")
aConfig.accessKey = utils.GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY") aConfig.accessKey = utils.GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY")
@@ -167,8 +169,8 @@ func initAWSConfig() *AWSConfig {
aConfig.forcePathStyle = forcePathStyle aConfig.forcePathStyle = forcePathStyle
err = utils.CheckEnvVars(awsVars) err = utils.CheckEnvVars(awsVars)
if err != nil { if err != nil {
utils.Error("Please make sure all required environment variables for AWS S3 are set") logger.Error("Please make sure all required environment variables for AWS S3 are set")
utils.Fatal("Error checking environment variables: %s", err) logger.Fatal("Error checking environment variables: %s", err)
} }
return &aConfig return &aConfig
} }
@@ -176,7 +178,7 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig {
utils.SetEnv("STORAGE_PATH", storagePath) utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION") utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION")
utils.GetEnv(cmd, "path", "REMOTE_PATH") utils.GetEnv(cmd, "path", "REMOTE_PATH")
//Get flag value and set env // Get flag value and set env
remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH") remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE") storage = utils.GetEnv(cmd, "storage", "STORAGE")
prune := false prune := false
@@ -198,7 +200,7 @@ func initBackupConfig(cmd *cobra.Command) *BackupConfig {
encryption = true encryption = true
usingKey = false usingKey = false
} }
//Initialize backup configs // Initialize backup configs
config := BackupConfig{} config := BackupConfig{}
config.backupRetention = backupRetention config.backupRetention = backupRetention
config.disableCompression = disableCompression config.disableCompression = disableCompression
@@ -228,7 +230,7 @@ func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
utils.SetEnv("STORAGE_PATH", storagePath) utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "path", "REMOTE_PATH") utils.GetEnv(cmd, "path", "REMOTE_PATH")
//Get flag value and set env // Get flag value and set env
s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH") s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH") remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE") storage = utils.GetEnv(cmd, "storage", "STORAGE")
@@ -242,7 +244,7 @@ func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
usingKey = false usingKey = false
} }
//Initialize restore configs // Initialize restore configs
rConfig := RestoreConfig{} rConfig := RestoreConfig{}
rConfig.s3Path = s3Path rConfig.s3Path = s3Path
rConfig.remotePath = remotePath rConfig.remotePath = remotePath
@@ -265,8 +267,8 @@ func initTargetDbConfig() *targetDbConfig {
err := utils.CheckEnvVars(tdbRVars) err := utils.CheckEnvVars(tdbRVars)
if err != nil { if err != nil {
utils.Error("Please make sure all required environment variables for the target database are set") logger.Error("Please make sure all required environment variables for the target database are set")
utils.Fatal("Error checking target database environment variables: %s", err) logger.Fatal("Error checking target database environment variables: %s", err)
} }
return &tdbConfig return &tdbConfig
} }


@@ -1,5 +1,6 @@
// Package internal / // Package internal /
/***** /*
****
@author Jonas Kaninda @author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT> @license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda @Copyright © 2024 Jonas Kaninda
@@ -9,6 +10,7 @@ package internal
import ( import (
"bytes" "bytes"
"fmt" "fmt"
"github.com/jkaninda/pg-bkup/pkg/logger"
"github.com/jkaninda/pg-bkup/utils" "github.com/jkaninda/pg-bkup/utils"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v3"
"os" "os"
@@ -18,13 +20,13 @@ import (
) )
func intro() { func intro() {
utils.Info("Starting PostgreSQL Backup...") logger.Info("Starting PostgreSQL Backup...")
utils.Info("Copyright (c) 2024 Jonas Kaninda ") logger.Info("Copyright (c) 2024 Jonas Kaninda ")
} }
// copyToTmp copy file to temporary directory // copyToTmp copy file to temporary directory
func deleteTemp() { func deleteTemp() {
utils.Info("Deleting %s ...", tmpPath) logger.Info("Deleting %s ...", tmpPath)
err := filepath.Walk(tmpPath, func(path string, info os.FileInfo, err error) error { err := filepath.Walk(tmpPath, func(path string, info os.FileInfo, err error) error {
if err != nil { if err != nil {
return err return err
@@ -40,16 +42,16 @@ func deleteTemp() {
return nil return nil
}) })
if err != nil { if err != nil {
utils.Error("Error deleting files: %v", err) logger.Error("Error deleting files: %v", err)
} else { } else {
utils.Info("Deleting %s ... done", tmpPath) logger.Info("Deleting %s ... done", tmpPath)
} }
} }
// TestDatabaseConnection tests the database connection // TestDatabaseConnection tests the database connection
func testDatabaseConnection(db *dbConfig) { func testDatabaseConnection(db *dbConfig) {
utils.Info("Connecting to %s database ...", db.dbName) logger.Info("Connecting to %s database ...", db.dbName)
// Test database connection // Test database connection
query := "SELECT version();" query := "SELECT version();"
@@ -74,10 +76,10 @@ func testDatabaseConnection(db *dbConfig) {
// Run the command and capture any errors // Run the command and capture any errors
err = cmd.Run() err = cmd.Run()
if err != nil { if err != nil {
utils.Fatal("Error running psql command: %v\nOutput: %s\n", err, out.String()) logger.Fatal("Error running psql command: %v\nOutput: %s\n", err, out.String())
return return
} }
utils.Info("Successfully connected to %s database", db.dbName) logger.Info("Successfully connected to %s database", db.dbName)
} }


@@ -1,5 +1,6 @@
// Package internal / // Package internal /
/***** /*
****
@author Jonas Kaninda @author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT> @license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda @Copyright © 2024 Jonas Kaninda
@@ -8,19 +9,19 @@ package internal
import ( import (
"fmt" "fmt"
"github.com/jkaninda/pg-bkup/utils" "github.com/jkaninda/pg-bkup/pkg/logger"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"time" "time"
) )
func StartMigration(cmd *cobra.Command) { func StartMigration(cmd *cobra.Command) {
intro() intro()
utils.Info("Starting database migration...") logger.Info("Starting database migration...")
//Get DB config // Get DB config
dbConf = initDbConfig(cmd) dbConf = initDbConfig(cmd)
targetDbConf = initTargetDbConfig() targetDbConf = initTargetDbConfig()
//Defining the target database variables // Defining the target database variables
newDbConfig := dbConfig{} newDbConfig := dbConfig{}
newDbConfig.dbHost = targetDbConf.targetDbHost newDbConfig.dbHost = targetDbConf.targetDbHost
newDbConfig.dbPort = targetDbConf.targetDbPort newDbConfig.dbPort = targetDbConf.targetDbPort
@@ -28,15 +29,15 @@ func StartMigration(cmd *cobra.Command) {
newDbConfig.dbUserName = targetDbConf.targetDbUserName newDbConfig.dbUserName = targetDbConf.targetDbUserName
newDbConfig.dbPassword = targetDbConf.targetDbPassword newDbConfig.dbPassword = targetDbConf.targetDbPassword
//Generate file name // Generate file name
backupFileName := fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405")) backupFileName := fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405"))
conf := &RestoreConfig{} conf := &RestoreConfig{}
conf.file = backupFileName conf.file = backupFileName
//Backup source Database // Backup source Database
BackupDatabase(dbConf, backupFileName, true) BackupDatabase(dbConf, backupFileName, true)
//Restore source database into target database // Restore source database into target database
utils.Info("Restoring [%s] database into [%s] database...", dbConf.dbName, targetDbConf.targetDbName) logger.Info("Restoring [%s] database into [%s] database...", dbConf.dbName, targetDbConf.targetDbName)
RestoreDatabase(&newDbConfig, conf) RestoreDatabase(&newDbConfig, conf)
utils.Info("[%s] database has been restored into [%s] database", dbConf.dbName, targetDbConf.targetDbName) logger.Info("[%s] database has been restored into [%s] database", dbConf.dbName, targetDbConf.targetDbName)
utils.Info("Database migration completed.") logger.Info("Database migration completed.")
} }


@@ -1,5 +1,6 @@
// Package internal / // Package internal /
/***** /*
****
@author Jonas Kaninda @author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT> @license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda @Copyright © 2024 Jonas Kaninda
@@ -7,6 +8,7 @@
package internal package internal
import ( import (
"github.com/jkaninda/pg-bkup/pkg/logger"
"os" "os"
"os/exec" "os/exec"
"path/filepath" "path/filepath"
@@ -39,20 +41,20 @@ func StartRestore(cmd *cobra.Command) {
} }
} }
func localRestore(dbConf *dbConfig, restoreConf *RestoreConfig) { func localRestore(dbConf *dbConfig, restoreConf *RestoreConfig) {
utils.Info("Restore database from local") logger.Info("Restore database from local")
localStorage := local.NewStorage(local.Config{ localStorage := local.NewStorage(local.Config{
RemotePath: storagePath, RemotePath: storagePath,
LocalPath: tmpPath, LocalPath: tmpPath,
}) })
err := localStorage.CopyFrom(restoreConf.file) err := localStorage.CopyFrom(restoreConf.file)
if err != nil { if err != nil {
utils.Fatal("Error copying backup file: %s", err) logger.Fatal("Error copying backup file: %s", err)
} }
RestoreDatabase(dbConf, restoreConf) RestoreDatabase(dbConf, restoreConf)
} }
func restoreFromS3(db *dbConfig, conf *RestoreConfig) { func restoreFromS3(db *dbConfig, conf *RestoreConfig) {
utils.Info("Restore database from s3") logger.Info("Restore database from s3")
awsConfig := initAWSConfig() awsConfig := initAWSConfig()
if conf.remotePath == "" { if conf.remotePath == "" {
conf.remotePath = awsConfig.remotePath conf.remotePath = awsConfig.remotePath
@@ -69,19 +71,19 @@ func restoreFromS3(db *dbConfig, conf *RestoreConfig) {
LocalPath: tmpPath, LocalPath: tmpPath,
}) })
if err != nil { if err != nil {
utils.Fatal("Error creating s3 storage: %s", err) logger.Fatal("Error creating s3 storage: %s", err)
} }
err = s3Storage.CopyFrom(conf.file) err = s3Storage.CopyFrom(conf.file)
if err != nil { if err != nil {
utils.Fatal("Error download file from S3 storage: %s", err) logger.Fatal("Error download file from S3 storage: %s", err)
} }
RestoreDatabase(db, conf) RestoreDatabase(db, conf)
} }
func restoreFromRemote(db *dbConfig, conf *RestoreConfig) { func restoreFromRemote(db *dbConfig, conf *RestoreConfig) {
utils.Info("Restore database from remote server") logger.Info("Restore database from remote server")
sshConfig, err := loadSSHConfig() sshConfig, err := loadSSHConfig()
if err != nil { if err != nil {
utils.Fatal("Error loading ssh config: %s", err) logger.Fatal("Error loading ssh config: %s", err)
} }
sshStorage, err := ssh.NewStorage(ssh.Config{ sshStorage, err := ssh.NewStorage(ssh.Config{
@@ -94,16 +96,16 @@ func restoreFromRemote(db *dbConfig, conf *RestoreConfig) {
LocalPath: tmpPath, LocalPath: tmpPath,
}) })
if err != nil { if err != nil {
utils.Fatal("Error creating SSH storage: %s", err) logger.Fatal("Error creating SSH storage: %s", err)
} }
err = sshStorage.CopyFrom(conf.file) err = sshStorage.CopyFrom(conf.file)
if err != nil { if err != nil {
utils.Fatal("Error copying backup file: %s", err) logger.Fatal("Error copying backup file: %s", err)
} }
RestoreDatabase(db, conf) RestoreDatabase(db, conf)
} }
func restoreFromFTP(db *dbConfig, conf *RestoreConfig) { func restoreFromFTP(db *dbConfig, conf *RestoreConfig) {
utils.Info("Restore database from FTP server") logger.Info("Restore database from FTP server")
ftpConfig := loadFtpConfig() ftpConfig := loadFtpConfig()
ftpStorage, err := ftp.NewStorage(ftp.Config{ ftpStorage, err := ftp.NewStorage(ftp.Config{
Host: ftpConfig.host, Host: ftpConfig.host,
@@ -114,11 +116,11 @@ func restoreFromFTP(db *dbConfig, conf *RestoreConfig) {
LocalPath: tmpPath, LocalPath: tmpPath,
}) })
if err != nil { if err != nil {
utils.Fatal("Error creating SSH storage: %s", err) logger.Fatal("Error creating SSH storage: %s", err)
} }
err = ftpStorage.CopyFrom(conf.file) err = ftpStorage.CopyFrom(conf.file)
if err != nil { if err != nil {
utils.Fatal("Error copying backup file: %s", err) logger.Fatal("Error copying backup file: %s", err)
} }
RestoreDatabase(db, conf) RestoreDatabase(db, conf)
} }
@@ -126,43 +128,43 @@ func restoreFromFTP(db *dbConfig, conf *RestoreConfig) {
// RestoreDatabase restore database // RestoreDatabase restore database
func RestoreDatabase(db *dbConfig, conf *RestoreConfig) { func RestoreDatabase(db *dbConfig, conf *RestoreConfig) {
if conf.file == "" { if conf.file == "" {
utils.Fatal("Error, file required") logger.Fatal("Error, file required")
} }
extension := filepath.Ext(filepath.Join(tmpPath, conf.file)) extension := filepath.Ext(filepath.Join(tmpPath, conf.file))
rFile, err := os.ReadFile(filepath.Join(tmpPath, conf.file)) rFile, err := os.ReadFile(filepath.Join(tmpPath, conf.file))
outputFile := RemoveLastExtension(filepath.Join(tmpPath, conf.file)) outputFile := RemoveLastExtension(filepath.Join(tmpPath, conf.file))
if err != nil { if err != nil {
utils.Fatal("Error reading backup file: %s ", err) logger.Fatal("Error reading backup file: %s ", err)
} }
if extension == ".gpg" { if extension == ".gpg" {
if conf.usingKey { if conf.usingKey {
utils.Info("Decrypting backup using private key...") logger.Info("Decrypting backup using private key...")
utils.Warn("Backup decryption using a private key is not fully supported") logger.Warn("Backup decryption using a private key is not fully supported")
prKey, err := os.ReadFile(conf.privateKey) prKey, err := os.ReadFile(conf.privateKey)
if err != nil { if err != nil {
utils.Fatal("Error reading public key: %s ", err) logger.Fatal("Error reading public key: %s ", err)
} }
err = encryptor.DecryptWithPrivateKey(rFile, outputFile, prKey, conf.passphrase) err = encryptor.DecryptWithPrivateKey(rFile, outputFile, prKey, conf.passphrase)
if err != nil { if err != nil {
utils.Fatal("error during decrypting backup %v", err) logger.Fatal("error during decrypting backup %v", err)
} }
utils.Info("Decrypting backup using private key...done") logger.Info("Decrypting backup using private key...done")
} else { } else {
if conf.passphrase == "" { if conf.passphrase == "" {
utils.Error("Error, passphrase or private key required") logger.Error("Error, passphrase or private key required")
utils.Fatal("Your file seems to be a GPG file.\nYou need to provide GPG keys. GPG_PASSPHRASE or GPG_PRIVATE_KEY environment variable is required.") logger.Fatal("Your file seems to be a GPG file.\nYou need to provide GPG keys. GPG_PASSPHRASE or GPG_PRIVATE_KEY environment variable is required.")
} else { } else {
utils.Info("Decrypting backup using passphrase...") logger.Info("Decrypting backup using passphrase...")
//decryptWithGPG file // decryptWithGPG file
err := encryptor.Decrypt(rFile, outputFile, conf.passphrase) err := encryptor.Decrypt(rFile, outputFile, conf.passphrase)
if err != nil { if err != nil {
utils.Fatal("Error decrypting file %s %v", file, err) logger.Fatal("Error decrypting file %s %v", file, err)
} }
utils.Info("Decrypting backup using passphrase...done") logger.Info("Decrypting backup using passphrase...done")
//Update file name // Update file name
conf.file = RemoveLastExtension(file) conf.file = RemoveLastExtension(file)
} }
} }
@@ -176,7 +178,7 @@ func RestoreDatabase(db *dbConfig, conf *RestoreConfig) {
return return
} }
testDatabaseConnection(db) testDatabaseConnection(db)
utils.Info("Restoring database...") logger.Info("Restoring database...")
extension := filepath.Ext(conf.file) extension := filepath.Ext(conf.file)
// Restore from compressed file / .sql.gz // Restore from compressed file / .sql.gz
@@ -184,29 +186,29 @@ func RestoreDatabase(db *dbConfig, conf *RestoreConfig) {
str := "zcat " + filepath.Join(tmpPath, conf.file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName str := "zcat " + filepath.Join(tmpPath, conf.file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName
_, err := exec.Command("sh", "-c", str).Output() _, err := exec.Command("sh", "-c", str).Output()
if err != nil { if err != nil {
utils.Fatal("Error, in restoring the database %v", err) logger.Fatal("Error, in restoring the database %v", err)
} }
utils.Info("Restoring database... done") logger.Info("Restoring database... done")
utils.Info("Database has been restored") logger.Info("Database has been restored")
//Delete temp // Delete temp
deleteTemp() deleteTemp()
} else if extension == ".sql" { } else if extension == ".sql" {
//Restore from sql file // Restore from sql file
str := "cat " + filepath.Join(tmpPath, conf.file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName str := "cat " + filepath.Join(tmpPath, conf.file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName
_, err := exec.Command("sh", "-c", str).Output() _, err := exec.Command("sh", "-c", str).Output()
if err != nil { if err != nil {
utils.Fatal("Error in restoring the database %v", err) logger.Fatal("Error in restoring the database %v", err)
} }
utils.Info("Restoring database... done") logger.Info("Restoring database... done")
utils.Info("Database has been restored") logger.Info("Database has been restored")
//Delete temp // Delete temp
deleteTemp() deleteTemp()
} else { } else {
utils.Fatal("Unknown file extension: %s", extension) logger.Fatal("Unknown file extension: %s", extension)
} }
} else { } else {
utils.Fatal("File not found in %s", filepath.Join(tmpPath, conf.file)) logger.Fatal("File not found in %s", filepath.Join(tmpPath, conf.file))
} }
} }


@@ -1,14 +1,8 @@
// Package internal / // Package internal /
/*****
@author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda
**/
package internal package internal
const tmpPath = "/tmp/backup" const tmpPath = "/tmp/backup"
const gpgHome = "/config/gnupg" const gpgHome = "/config/gnupg"
const algorithm = "aes256"
const gpgExtension = "gpg" const gpgExtension = "gpg"
const timeFormat = "2006-01-02 at 15:04:05" const timeFormat = "2006-01-02 at 15:04:05"
@@ -44,13 +38,6 @@ var tdbRVars = []string{
var dbConf *dbConfig var dbConf *dbConfig
var targetDbConf *targetDbConfig var targetDbConf *targetDbConfig
// sshVars Required environment variables for SSH remote server storage
var sshVars = []string{
"SSH_USER",
"SSH_HOST_NAME",
"SSH_PORT",
"REMOTE_PATH",
}
var ftpVars = []string{ var ftpVars = []string{
"FTP_HOST_NAME", "FTP_HOST_NAME",
"FTP_USER", "FTP_USER",


@@ -1,7 +1,8 @@
// Package main / // Package main /
/***** /*
****
@author Jonas Kaninda @author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT> @license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda @Copyright © 2024 Jonas Kaninda
**/ **/
package main package main

pkg/logger/logger.go (new file)

@@ -0,0 +1,73 @@
package logger
import (
"fmt"
"log"
"os"
"runtime"
"strings"
)
// Info returns info log
func Info(msg string, args ...interface{}) {
log.SetOutput(getStd("/dev/stdout"))
logWithCaller("INFO", msg, args...)
}
// Warn returns warning log
func Warn(msg string, args ...interface{}) {
log.SetOutput(getStd("/dev/stdout"))
logWithCaller("WARN", msg, args...)
}
// Error logs error messages
func Error(msg string, args ...interface{}) {
log.SetOutput(getStd("/dev/stderr"))
logWithCaller("ERROR", msg, args...)
}
func Fatal(msg string, args ...interface{}) {
log.SetOutput(os.Stdout)
logWithCaller("ERROR", msg, args...)
os.Exit(1)
}
// Helper function to format and log messages with file and line number
func logWithCaller(level, msg string, args ...interface{}) {
// Format message if there are additional arguments
formattedMessage := msg
if len(args) > 0 {
formattedMessage = fmt.Sprintf(msg, args...)
}
// Get the caller's file and line number (skip 2 frames)
_, file, line, ok := runtime.Caller(2)
if !ok {
file = "unknown"
line = 0
}
// Log message with caller information if GOMA_LOG_LEVEL is trace
if strings.ToLower(level) != "off" {
if strings.ToLower(level) == traceLog {
log.Printf("%s: %s (File: %s, Line: %d)\n", level, formattedMessage, file, line)
} else {
log.Printf("%s: %s\n", level, formattedMessage)
}
}
}
func getStd(out string) *os.File {
switch out {
case "/dev/stdout":
return os.Stdout
case "/dev/stderr":
return os.Stderr
case "/dev/stdin":
return os.Stdin
default:
return os.Stdout
}
}
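
For reference, a hedged usage sketch of the new logger package (the import path is taken from the diffs above; the surrounding function is illustrative only):

package example

import (
	"errors"

	"github.com/jkaninda/pg-bkup/pkg/logger"
)

func runBackup() {
	// Call sites migrate mechanically: utils.Info / utils.Fatal become
	// logger.Info / logger.Fatal with the same printf-style arguments.
	logger.Info("Starting backup task...")

	if err := doBackup(); err != nil {
		// Fatal logs the formatted message and exits the process with status 1.
		logger.Fatal("Error running backup: %v", err)
	}
	logger.Info("Backup completed successfully")
}

func doBackup() error {
	return errors.New("not implemented")
}

One behavioral difference worth noting: the removed utils.Fatal also called NotifyError before exiting, while the new logger.Fatal only logs and exits.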

pkg/logger/var.go (new file)

@@ -0,0 +1,3 @@
package logger
const traceLog = "trace"


@@ -1,9 +1,4 @@
// Package utils / // Package utils /
/*****
@author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda
**/
package utils package utils
import "os" import "os"


@@ -1,62 +0,0 @@
// Package utils /
/*****
@author Jonas Kaninda
@license MIT License <https://opensource.org/licenses/MIT>
@Copyright © 2024 Jonas Kaninda
**/
package utils
import (
"fmt"
"log"
"os"
)
// Info message
func Info(msg string, args ...any) {
log.SetOutput(os.Stdout)
formattedMessage := fmt.Sprintf(msg, args...)
if len(args) == 0 {
log.Printf("INFO: %s\n", msg)
} else {
log.Printf("INFO: %s\n", formattedMessage)
}
}
// Warn a Warning message
func Warn(msg string, args ...any) {
log.SetOutput(os.Stdout)
formattedMessage := fmt.Sprintf(msg, args...)
if len(args) == 0 {
log.Printf("WARN: %s\n", msg)
} else {
log.Printf("WARN: %s\n", formattedMessage)
}
}
// Error error message
func Error(msg string, args ...any) {
log.SetOutput(os.Stdout)
formattedMessage := fmt.Sprintf(msg, args...)
if len(args) == 0 {
log.Printf("ERROR: %s\n", msg)
} else {
log.Printf("ERROR: %s\n", formattedMessage)
}
}
func Fatal(msg string, args ...any) {
log.SetOutput(os.Stdout)
// Fatal logs an error message and exits the program.
formattedMessage := fmt.Sprintf(msg, args...)
if len(args) == 0 {
log.Printf("ERROR: %s\n", msg)
NotifyError(msg)
} else {
log.Printf("ERROR: %s\n", formattedMessage)
NotifyError(formattedMessage)
}
os.Exit(1)
}


@@ -6,8 +6,9 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/go-mail/mail" "github.com/go-mail/mail"
"github.com/jkaninda/pg-bkup/pkg/logger"
"html/template" "html/template"
"io/ioutil" "io"
"net/http" "net/http"
"os" "os"
"path/filepath" "path/filepath"
@@ -31,7 +32,7 @@ func parseTemplate[T any](data T, fileName string) (string, error) {
 }

 func SendEmail(subject, body string) error {
-	Info("Start sending email notification....")
+	logger.Info("Start sending email notification....")
 	config := loadMailConfig()
 	emails := strings.Split(config.MailTo, ",")
 	m := mail.NewMessage()
@@ -43,16 +44,16 @@ func SendEmail(subject, body string) error {
 	d.TLSConfig = &tls.Config{InsecureSkipVerify: config.SkipTls}

 	if err := d.DialAndSend(m); err != nil {
-		Error("Error could not send email : %v", err)
+		logger.Error("Error could not send email : %v", err)
 		return err
 	}
-	Info("Email notification has been sent")
+	logger.Info("Email notification has been sent")
 	return nil
 }

 func sendMessage(msg string) error {
-	Info("Sending Telegram notification... ")
+	logger.Info("Sending Telegram notification... ")
 	chatId := os.Getenv("TG_CHAT_ID")
 	body, _ := json.Marshal(map[string]string{
 		"chat_id": chatId,
@@ -72,11 +73,11 @@ func sendMessage(msg string) error {
 	}

 	code := response.StatusCode
 	if code == 200 {
-		Info("Telegram notification has been sent")
+		logger.Info("Telegram notification has been sent")
 		return nil
 	} else {
-		body, _ := ioutil.ReadAll(response.Body)
-		Error("Error could not send message, error: %s", string(body))
+		body, _ := io.ReadAll(response.Body)
+		logger.Error("Error could not send message, error: %s", string(body))
 		return fmt.Errorf("error could not send message %s", string(body))
 	}
@@ -96,29 +97,29 @@ func NotifySuccess(notificationData *NotificationData) {
"MAIL_TO", "MAIL_TO",
} }
//Email notification // Email notification
err := CheckEnvVars(mailVars) err := CheckEnvVars(mailVars)
if err == nil { if err == nil {
body, err := parseTemplate(*notificationData, "email.tmpl") body, err := parseTemplate(*notificationData, "email.tmpl")
if err != nil { if err != nil {
Error("Could not parse email template: %v", err) logger.Error("Could not parse email template: %v", err)
} }
err = SendEmail(fmt.Sprintf("✅ Database Backup Notification %s", notificationData.Database), body) err = SendEmail(fmt.Sprintf("✅ Database Backup Notification %s", notificationData.Database), body)
if err != nil { if err != nil {
Error("Could not send email: %v", err) logger.Error("Could not send email: %v", err)
} }
} }
//Telegram notification // Telegram notification
err = CheckEnvVars(vars) err = CheckEnvVars(vars)
if err == nil { if err == nil {
message, err := parseTemplate(*notificationData, "telegram.tmpl") message, err := parseTemplate(*notificationData, "telegram.tmpl")
if err != nil { if err != nil {
Error("Could not parse telegram template: %v", err) logger.Error("Could not parse telegram template: %v", err)
} }
err = sendMessage(message) err = sendMessage(message)
if err != nil { if err != nil {
Error("Could not send Telegram message: %v", err) logger.Error("Could not send Telegram message: %v", err)
} }
} }
} }
@@ -136,7 +137,7 @@ func NotifyError(error string) {
"MAIL_TO", "MAIL_TO",
} }
//Email notification // Email notification
err := CheckEnvVars(mailVars) err := CheckEnvVars(mailVars)
if err == nil { if err == nil {
body, err := parseTemplate(ErrorMessage{ body, err := parseTemplate(ErrorMessage{
@@ -145,14 +146,14 @@ func NotifyError(error string) {
 			BackupReference: os.Getenv("BACKUP_REFERENCE"),
 		}, "email-error.tmpl")
 		if err != nil {
-			Error("Could not parse error template: %v", err)
+			logger.Error("Could not parse error template: %v", err)
 		}
-		err = SendEmail(fmt.Sprintf("🔴 Urgent: Database Backup Failure Notification"), body)
+		err = SendEmail("🔴 Urgent: Database Backup Failure Notification", body)
 		if err != nil {
-			Error("Could not send email: %v", err)
+			logger.Error("Could not send email: %v", err)
 		}
 	}
-	//Telegram notification
+	// Telegram notification
 	err = CheckEnvVars(vars)
 	if err == nil {
 		message, err := parseTemplate(ErrorMessage{
@@ -161,13 +162,13 @@ func NotifyError(error string) {
 			BackupReference: os.Getenv("BACKUP_REFERENCE"),
 		}, "telegram-error.tmpl")
 		if err != nil {
-			Error("Could not parse error template: %v", err)
+			logger.Error("Could not parse error template: %v", err)
 		}
 		err = sendMessage(message)
 		if err != nil {
-			Error("Could not send telegram message: %v", err)
+			logger.Error("Could not send telegram message: %v", err)
 		}
 	}
 }
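The notification hunks above call a generic parseTemplate helper whose body is unchanged context and therefore not shown in this diff. Purely as an illustration of what such a generic template renderer can look like — the template directory, the notificationData type, and the main function here are assumptions, not the project's actual code:

package main

import (
	"bytes"
	"fmt"
	"html/template"
	"path/filepath"
)

// parseTemplate renders the named template file with the given data and
// returns the result as a string. The "./templates" directory is assumed
// for this sketch only.
func parseTemplate[T any](data T, fileName string) (string, error) {
	tmpl, err := template.ParseFiles(filepath.Join("./templates", fileName))
	if err != nil {
		return "", err
	}
	var buf bytes.Buffer
	if err := tmpl.Execute(&buf, data); err != nil {
		return "", err
	}
	return buf.String(), nil
}

// notificationData is a hypothetical stand-in for the project's template data.
type notificationData struct {
	Database string
}

func main() {
	body, err := parseTemplate(notificationData{Database: "demo"}, "email.tmpl")
	if err != nil {
		fmt.Println("template error:", err)
		return
	}
	fmt.Println(body)
}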

View File

@@ -1,5 +1,6 @@
 // Package utils /
-/*****
+/*
+****
 @author Jonas Kaninda
 @license MIT License <https://opensource.org/licenses/MIT>
 @Copyright © 2024 Jonas Kaninda
@@ -8,6 +9,7 @@ package utils

 import (
 	"fmt"
+	"github.com/jkaninda/pg-bkup/pkg/logger"
 	"github.com/robfig/cron/v3"
 	"github.com/spf13/cobra"
 	"io"
@@ -31,7 +33,13 @@ func WriteToFile(filePath, content string) error {
 	if err != nil {
 		return err
 	}
-	defer file.Close()
+	defer func(file *os.File) {
+		err := file.Close()
+		if err != nil {
+			return
+		}
+	}(file)

 	_, err = file.WriteString(content)
 	return err
@@ -49,14 +57,25 @@ func CopyFile(src, dst string) error {
 	if err != nil {
 		return fmt.Errorf("failed to open source file: %v", err)
 	}
-	defer sourceFile.Close()
+	defer func(sourceFile *os.File) {
+		err := sourceFile.Close()
+		if err != nil {
+			return
+		}
+	}(sourceFile)

 	// Create the destination file
 	destinationFile, err := os.Create(dst)
 	if err != nil {
 		return fmt.Errorf("failed to create destination file: %v", err)
 	}
-	defer destinationFile.Close()
+	defer func(destinationFile *os.File) {
+		err := destinationFile.Close()
+		if err != nil {
+			return
+		}
+	}(destinationFile)

 	// Copy the content from source to destination
 	_, err = io.Copy(destinationFile, sourceFile)
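The deferred closures introduced above are the pattern that keeps errcheck satisfied: the error returned by Close is explicitly received rather than silently discarded by a bare defer file.Close(). A standalone sketch of the same idea, assuming an illustrative file name; this variant logs the close error, whereas the hunks above simply return:

package main

import (
	"io"
	"log"
	"os"
)

// readAll mirrors the deferred-close pattern used in the refactor above:
// the closure checks Close's return value instead of dropping it.
func readAll(path string) ([]byte, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer func(f *os.File) {
		if err := f.Close(); err != nil {
			log.Printf("WARN: closing %s failed: %v", path, err)
		}
	}(f)

	return io.ReadAll(f)
}

func main() {
	data, err := readAll("example.txt") // illustrative file name
	if err != nil {
		log.Printf("ERROR: %v", err)
		return
	}
	log.Printf("read %d bytes", len(data))
}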
@@ -74,7 +93,7 @@ func CopyFile(src, dst string) error {
 }

 func ChangePermission(filePath string, mod int) {
 	if err := os.Chmod(filePath, fs.FileMode(mod)); err != nil {
-		Fatal("Error changing permissions of %s: %v\n", filePath, err)
+		logger.Fatal("Error changing permissions of %s: %v\n", filePath, err)
 	}
 }
@@ -83,7 +102,12 @@ func IsDirEmpty(name string) (bool, error) {
 	if err != nil {
 		return false, err
 	}
-	defer f.Close()
+	defer func(f *os.File) {
+		err := f.Close()
+		if err != nil {
+			return
+		}
+	}(f)

 	_, err = f.Readdirnames(1)
 	if err == nil {
@@ -131,7 +155,7 @@ func GetEnvVariable(envName, oldEnvName string) string {
 			if err != nil {
 				return value
 			}
-			Warn("%s is deprecated, please use %s instead! ", oldEnvName, envName)
+			logger.Warn("%s is deprecated, please use %s instead! ", oldEnvName, envName)
 		}
 	}
 	return value
@@ -178,7 +202,7 @@ func GetIntEnv(envName string) int {
 	}
 	ret, err := strconv.Atoi(val)
 	if err != nil {
-		Error("Error: %v", err)
+		logger.Error("Error: %v", err)
 	}
 	return ret
 }
@@ -203,14 +227,13 @@ func CronNextTime(cronExpr string) time.Time {
 	// Parse the cron expression
 	schedule, err := cron.ParseStandard(cronExpr)
 	if err != nil {
-		Error("Error parsing cron expression: %s", err)
+		logger.Error("Error parsing cron expression: %s", err)
 		return time.Time{}
 	}

 	// Get the current time
 	now := time.Now()
 	// Get the next scheduled time
 	next := schedule.Next(now)
-	//Info("The next scheduled time is: %v\n", next)
 	return next
 }
 func UsageErrorf(cmd *cobra.Command, message string, args ...interface{}) error {
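As a quick illustration of the parsing that the CronNextTime helper above is built on, a standalone snippet using github.com/robfig/cron/v3; the cron expression is only an example:

package main

import (
	"fmt"
	"time"

	"github.com/robfig/cron/v3"
)

func main() {
	// "0 1 * * *" means every day at 01:00 — an example expression.
	schedule, err := cron.ParseStandard("0 1 * * *")
	if err != nil {
		fmt.Println("invalid cron expression:", err)
		return
	}
	next := schedule.Next(time.Now())
	fmt.Println("next scheduled run:", next.Format(time.RFC3339))
}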