Merge pull request #92 from jkaninda/refactor

Refactor
2024-10-02 04:14:22 +02:00
committed by GitHub
10 changed files with 149 additions and 120 deletions

View File

@@ -2,8 +2,6 @@ name: Build
on:
push:
branches: ['develop']
env:
BUILDKIT_IMAGE: jkaninda/pg-bkup
jobs:
docker:
runs-on: ubuntu-latest
@@ -27,6 +25,8 @@ jobs:
push: true
file: "./docker/Dockerfile"
platforms: linux/amd64,linux/arm64,linux/arm/v7
build-args: |
appVersion=develop-${{ github.sha }}
tags: |
"${{env.BUILDKIT_IMAGE}}:develop-${{ github.sha }}"
"${{vars.BUILDKIT_IMAGE}}:develop-${{ github.sha }}"

View File

@@ -3,8 +3,6 @@ on:
push:
tags:
- v1.**
env:
BUILDKIT_IMAGE: jkaninda/pg-bkup
jobs:
docker:
runs-on: ubuntu-latest
@@ -41,9 +39,11 @@ jobs:
push: true
file: "./docker/Dockerfile"
platforms: linux/amd64,linux/arm64,linux/arm/v7
build-args: |
appVersion=${{ env.TAG_NAME }}
tags: |
"${{env.BUILDKIT_IMAGE}}:${{ env.TAG_NAME }}"
"${{env.BUILDKIT_IMAGE}}:latest"
"ghcr.io/${{env.BUILDKIT_IMAGE}}:${{ env.TAG_NAME }}"
"ghcr.io/${{env.BUILDKIT_IMAGE}}:latest"
"${{vars.BUILDKIT_IMAGE}}:${{ env.TAG_NAME }}"
"${{vars.BUILDKIT_IMAGE}}:latest"
"ghcr.io/${{vars.BUILDKIT_IMAGE}}:${{ env.TAG_NAME }}"
"ghcr.io/${{vars.BUILDKIT_IMAGE}}:latest"

View File

@@ -29,8 +29,6 @@ func init() {
//Backup
BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. default or scheduled")
BackupCmd.PersistentFlags().StringP("period", "", "", "Schedule period time | Deprecated")
BackupCmd.PersistentFlags().StringP("cron-expression", "", "", "Backup cron expression")
BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled")
BackupCmd.PersistentFlags().IntP("keep-last", "", 7, "Delete files created more than specified days ago, default 7 days")
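Note: as a point of reference, here is a minimal, stand-alone sketch of how a persistent flag such as `--cron-expression` can be resolved with a fallback to its environment variable, similar in spirit to the `utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION")` call that appears later in this PR. The helper and demo command below are illustrative only, not part of pg-bkup.

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

// getFlagOrEnv returns the flag value if it was set, otherwise the environment variable.
// Hypothetical helper for illustration; pg-bkup uses its own utils.GetEnv.
func getFlagOrEnv(cmd *cobra.Command, flag, envKey string) string {
	if v, err := cmd.Flags().GetString(flag); err == nil && v != "" {
		return v
	}
	return os.Getenv(envKey)
}

func main() {
	cmd := &cobra.Command{
		Use: "backup",
		Run: func(cmd *cobra.Command, args []string) {
			fmt.Println("cron expression:", getFlagOrEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION"))
		},
	}
	cmd.PersistentFlags().StringP("cron-expression", "", "", "Backup cron expression")
	_ = cmd.Execute()
}
```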

View File

@@ -23,14 +23,15 @@ ENV AWS_SECRET_KEY=""
ENV AWS_S3_PATH=""
ENV AWS_REGION="us-west-2"
ENV AWS_DISABLE_SSL="false"
ENV AWS_FORCE_PATH_STYLE="true"
ENV GPG_PASSPHRASE=""
ENV SSH_USER=""
ENV SSH_PASSWORD=""
ENV SSH_HOST_NAME=""
ENV SSH_HOST=""
ENV SSH_IDENTIFY_FILE=""
ENV SSH_PORT=22
ENV REMOTE_PATH=""
ENV FTP_HOST_NAME=""
ENV FTP_HOST=""
ENV FTP_PORT=21
ENV FTP_USER=""
ENV FTP_PASSWORD=""
@@ -39,7 +40,6 @@ ENV TARGET_DB_PORT=5432
ENV TARGET_DB_NAME=""
ENV TARGET_DB_USERNAME=""
ENV TARGET_DB_PASSWORD=""
ENV VERSION="v1.2.11"
ENV BACKUP_CRON_EXPRESSION=""
ENV TG_TOKEN=""
ENV TG_CHAT_ID=""
@@ -47,7 +47,10 @@ ENV TZ=UTC
ARG WORKDIR="/config"
ARG BACKUPDIR="/backup"
ARG BACKUP_TMP_DIR="/tmp/backup"
ARG appVersion="v1.2.11"
ENV VERSION=${appVersion}
LABEL author="Jonas Kaninda"
LABEL version=${appVersion}
RUN apk --update add --no-cache postgresql-client gnupg tzdata
RUN mkdir $WORKDIR

View File

@@ -30,7 +30,7 @@ services:
- DB_USERNAME=username
- DB_PASSWORD=password
## FTP config
- FTP_HOST_NAME="hostname"
- FTP_HOST="hostname"
- FTP_PORT=21
- FTP_USER=user
- FTP_PASSWORD=password

View File

@@ -33,7 +33,7 @@ services:
- DB_USERNAME=username
- DB_PASSWORD=password
## SSH config
- SSH_HOST_NAME="hostname"
- SSH_HOST="hostname"
- SSH_PORT=22
- SSH_USER=user
- REMOTE_PATH=/home/jkaninda/backups
@@ -73,7 +73,7 @@ services:
- DB_USERNAME=username
- DB_PASSWORD=password
## SSH config
- SSH_HOST_NAME="hostname"
- SSH_HOST="hostname"
- SSH_PORT=22
- SSH_USER=user
- REMOTE_PATH=/home/jkaninda/backups
@@ -124,7 +124,7 @@ spec:
# Please use secret!
- name: DB_PASSWORD
value: ""
- name: SSH_HOST_NAME
- name: SSH_HOST
value: ""
- name: SSH_PORT
value: "22"

View File

@@ -47,16 +47,17 @@ Backup, restore and migrate targets, schedule and retention are configured using
| AWS_BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name |
| AWS_REGION | Optional, required for S3 storage | AWS Region |
| AWS_DISABLE_SSL | Optional, required for S3 storage | Disable SSL |
| AWS_FORCE_PATH_STYLE | Optional, required for S3 storage | Force path style |
| FILE_NAME | Optional if it was provided from the --file flag | Database file to restore (extensions: .sql, .sql.gz) |
| GPG_PASSPHRASE | Optional, required to encrypt and restore backup | GPG passphrase |
| BACKUP_CRON_EXPRESSION | Optional if it was provided from the `--cron-expression` flag | Backup cron expression for docker in scheduled mode |
| SSH_HOST_NAME | Optional, required for SSH storage | ssh remote hostname or ip |
| SSH_HOST | Optional, required for SSH storage | ssh remote hostname or ip |
| SSH_USER | Optional, required for SSH storage | ssh remote user |
| SSH_PASSWORD | Optional, required for SSH storage | ssh remote user's password |
| SSH_IDENTIFY_FILE | Optional, required for SSH storage | ssh remote user's private key |
| SSH_PORT | Optional, required for SSH storage | ssh remote server port |
| REMOTE_PATH | Optional, required for SSH or FTP storage | remote path (/home/toto/backup) |
| FTP_HOST_NAME | Optional, required for FTP storage | FTP host name |
| FTP_HOST | Optional, required for FTP storage | FTP host name |
| FTP_PORT | Optional, required for FTP storage | FTP server port number |
| FTP_USER | Optional, required for FTP storage | FTP user |
| FTP_PASSWORD | Optional, required for FTP storage | FTP user password |
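Note: the table above reflects the renaming of SSH_HOST_NAME to SSH_HOST and FTP_HOST_NAME to FTP_HOST. The refactored config code routes both through `utils.GetEnvVariable`, reading the new name first. Below is a minimal stand-alone sketch of that "new name first, legacy name second" lookup, assuming those semantics; the helper name is illustrative and not the project's actual implementation.

```go
package main

import (
	"fmt"
	"os"
)

// getEnvWithFallback prefers the new variable name and falls back to the
// deprecated one so existing deployments keep working.
func getEnvWithFallback(newKey, legacyKey string) string {
	if v := os.Getenv(newKey); v != "" {
		return v
	}
	return os.Getenv(legacyKey)
}

func main() {
	fmt.Println("SSH host:", getEnvWithFallback("SSH_HOST", "SSH_HOST_NAME"))
	fmt.Println("FTP host:", getEnvWithFallback("FTP_HOST", "FTP_HOST_NAME"))
}
```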

View File

@@ -7,9 +7,11 @@
package pkg
import (
"fmt"
"github.com/jkaninda/pg-bkup/utils"
"github.com/spf13/cobra"
"os"
"strconv"
)
type Config struct {
@@ -52,20 +54,22 @@ type FTPConfig struct {
remotePath string
}
func initFtpConfig() *FTPConfig {
//Initialize backup configs
fConfig := FTPConfig{}
fConfig.host = os.Getenv("FTP_HOST_NAME")
fConfig.user = os.Getenv("FTP_USER")
fConfig.password = os.Getenv("FTP_PASSWORD")
fConfig.port = os.Getenv("FTP_PORT")
fConfig.remotePath = os.Getenv("REMOTE_PATH")
err := utils.CheckEnvVars(ftpVars)
if err != nil {
utils.Error("Please make sure all required environment variables for FTP are set")
utils.Fatal("Error checking environment variables: %s", err)
// SSHConfig holds the SSH connection details
type SSHConfig struct {
user string
password string
hostName string
port string
identifyFile string
}
return &fConfig
type AWSConfig struct {
endpoint string
bucket string
accessKey string
secretKey string
region string
disableSsl bool
forcePathStyle bool
}
func initDbConfig(cmd *cobra.Command) *dbConfig {
@@ -85,6 +89,64 @@ func initDbConfig(cmd *cobra.Command) *dbConfig {
}
return &dConf
}
// loadSSHConfig loads the SSH configuration from environment variables
func loadSSHConfig() (*SSHConfig, error) {
utils.GetEnvVariable("SSH_HOST", "SSH_HOST_NAME")
sshVars := []string{"SSH_USER", "SSH_HOST", "SSH_PORT", "REMOTE_PATH"}
err := utils.CheckEnvVars(sshVars)
if err != nil {
return nil, fmt.Errorf("error missing environment variables: %w", err)
}
return &SSHConfig{
user: os.Getenv("SSH_USER"),
password: os.Getenv("SSH_PASSWORD"),
hostName: os.Getenv("SSH_HOST"),
port: os.Getenv("SSH_PORT"),
identifyFile: os.Getenv("SSH_IDENTIFY_FILE"),
}, nil
}
func initFtpConfig() *FTPConfig {
// Initialize FTP configs
fConfig := FTPConfig{}
fConfig.host = utils.GetEnvVariable("FTP_HOST", "FTP_HOST_NAME")
fConfig.user = os.Getenv("FTP_USER")
fConfig.password = os.Getenv("FTP_PASSWORD")
fConfig.port = os.Getenv("FTP_PORT")
fConfig.remotePath = os.Getenv("REMOTE_PATH")
err := utils.CheckEnvVars(ftpVars)
if err != nil {
utils.Error("Please make sure all required environment variables for FTP are set")
utils.Fatal("Error missing environment variables: %s", err)
}
return &fConfig
}
func initAWSConfig() *AWSConfig {
//Initialize AWS configs
aConfig := AWSConfig{}
aConfig.endpoint = utils.GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT")
aConfig.accessKey = utils.GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY")
aConfig.secretKey = utils.GetEnvVariable("AWS_SECRET_KEY", "SECRET_KEY")
aConfig.bucket = utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
aConfig.region = os.Getenv("AWS_REGION")
disableSsl, err := strconv.ParseBool(os.Getenv("AWS_DISABLE_SSL"))
if err != nil {
utils.Fatal("Unable to parse AWS_DISABLE_SSL env var: %s", err)
}
forcePathStyle, err := strconv.ParseBool(os.Getenv("AWS_FORCE_PATH_STYLE"))
if err != nil {
utils.Fatal("Unable to parse AWS_FORCE_PATH_STYLE env var: %s", err)
}
aConfig.disableSsl = disableSsl
aConfig.forcePathStyle = forcePathStyle
err = utils.CheckEnvVars(awsVars)
if err != nil {
utils.Error("Please make sure all required environment variables for AWS S3 are set")
utils.Fatal("Error checking environment variables: %s", err)
}
return &aConfig
}
func initBackupConfig(cmd *cobra.Command) *BackupConfig {
utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "cron-expression", "BACKUP_CRON_EXPRESSION")
@@ -136,7 +198,6 @@ func initRestoreConfig(cmd *cobra.Command) *RestoreConfig {
remotePath := utils.GetEnvVariable("REMOTE_PATH", "SSH_REMOTE_PATH")
storage = utils.GetEnv(cmd, "storage", "STORAGE")
file = utils.GetEnv(cmd, "file", "FILE_NAME")
_, _ = cmd.Flags().GetString("mode")
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
gpqPassphrase := os.Getenv("GPG_PASSPHRASE")
//Initialize restore configs

View File

@@ -17,44 +17,19 @@ import (
"net/http"
"os"
"path/filepath"
"strconv"
"time"
)
// CreateSession creates a new AWS session
func CreateSession() (*session.Session, error) {
// AwsVars Required environment variables for AWS S3 storage
var awsVars = []string{
"AWS_S3_ENDPOINT",
"AWS_S3_BUCKET_NAME",
"AWS_ACCESS_KEY",
"AWS_SECRET_KEY",
"AWS_REGION",
}
endPoint := utils.GetEnvVariable("AWS_S3_ENDPOINT", "S3_ENDPOINT")
accessKey := utils.GetEnvVariable("AWS_ACCESS_KEY", "ACCESS_KEY")
secretKey := utils.GetEnvVariable("AWS_SECRET_KEY", "SECRET_KEY")
_ = utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
region := os.Getenv("AWS_REGION")
awsDisableSsl, err := strconv.ParseBool(os.Getenv("AWS_DISABLE_SSL"))
if err != nil {
utils.Fatal("Unable to parse AWS_DISABLE_SSL env var: %s", err)
}
err = utils.CheckEnvVars(awsVars)
if err != nil {
utils.Fatal("Error checking environment variables\n: %s", err)
}
awsConfig := initAWSConfig()
// Configure to use MinIO Server
s3Config := &aws.Config{
Credentials: credentials.NewStaticCredentials(accessKey, secretKey, ""),
Endpoint: aws.String(endPoint),
Region: aws.String(region),
DisableSSL: aws.Bool(awsDisableSsl),
S3ForcePathStyle: aws.Bool(true),
Credentials: credentials.NewStaticCredentials(awsConfig.accessKey, awsConfig.secretKey, ""),
Endpoint: aws.String(awsConfig.endpoint),
Region: aws.String(awsConfig.region),
DisableSSL: aws.Bool(awsConfig.disableSsl),
S3ForcePathStyle: aws.Bool(awsConfig.forcePathStyle),
}
return session.NewSession(s3Config)
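Note: the hunk above replaces inline environment handling with the AWSConfig struct from config.go. For readers less familiar with the aws-sdk-go v1 pattern, here is a self-contained sketch of the same session and client setup with placeholder values; the endpoint, keys, and the final s3.New client call are illustrative assumptions, not code from the PR.

```go
package main

import (
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/credentials"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	// Mirrors the config that CreateSession builds, with placeholders instead of AWSConfig.
	s3Config := &aws.Config{
		Credentials:      credentials.NewStaticCredentials("ACCESS_KEY", "SECRET_KEY", ""),
		Endpoint:         aws.String("https://s3.example.com"), // placeholder endpoint
		Region:           aws.String("us-west-2"),
		DisableSSL:       aws.Bool(false),
		S3ForcePathStyle: aws.Bool(true),
	}
	sess, err := session.NewSession(s3Config)
	if err != nil {
		log.Fatalf("unable to create AWS session: %v", err)
	}
	svc := s3.New(sess) // S3 service client that upload/download helpers would use
	_ = svc
}
```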
@@ -106,7 +81,7 @@ func DownloadFile(destinationPath, key, bucket, prefix string) error {
if err != nil {
return err
}
utils.Info("Download backup from S3 storage...")
utils.Info("Download data from S3 storage...")
file, err := os.Create(filepath.Join(destinationPath, key))
if err != nil {
utils.Error("Failed to create file", err)

View File

@@ -18,81 +18,73 @@ import (
"path/filepath"
)
func CopyToRemote(fileName, remotePath string) error {
sshUser := os.Getenv("SSH_USER")
sshPassword := os.Getenv("SSH_PASSWORD")
sshHostName := os.Getenv("SSH_HOST_NAME")
sshPort := os.Getenv("SSH_PORT")
sshIdentifyFile := os.Getenv("SSH_IDENTIFY_FILE")
err := utils.CheckEnvVars(sshVars)
if err != nil {
utils.Fatal(fmt.Sprintf("Error checking environment variables\n: %s", err))
}
clientConfig, _ := auth.PasswordKey(sshUser, sshPassword, ssh.InsecureIgnoreHostKey())
if sshIdentifyFile != "" && utils.FileExists(sshIdentifyFile) {
clientConfig, _ = auth.PrivateKey(sshUser, sshIdentifyFile, ssh.InsecureIgnoreHostKey())
// createSSHClientConfig sets up the SSH client configuration based on the provided SSHConfig
func createSSHClientConfig(sshConfig *SSHConfig) (ssh.ClientConfig, error) {
if sshConfig.identifyFile != "" && utils.FileExists(sshConfig.identifyFile) {
return auth.PrivateKey(sshConfig.user, sshConfig.identifyFile, ssh.InsecureIgnoreHostKey())
} else {
if sshPassword == "" {
return errors.New("SSH_PASSWORD environment variable is required if SSH_IDENTIFY_FILE is empty")
if sshConfig.password == "" {
return ssh.ClientConfig{}, errors.New("SSH_PASSWORD environment variable is required if SSH_IDENTIFY_FILE is empty")
}
utils.Warn("Accessing the remote server using password, which is not recommended.")
return auth.PasswordKey(sshConfig.user, sshConfig.password, ssh.InsecureIgnoreHostKey())
}
}
utils.Warn("Accessing the remote server using password, password is not recommended")
clientConfig, _ = auth.PasswordKey(sshUser, sshPassword, ssh.InsecureIgnoreHostKey())
// CopyToRemote copies a file to a remote server via SCP
func CopyToRemote(fileName, remotePath string) error {
// Load environment variables
sshConfig, err := loadSSHConfig()
if err != nil {
return fmt.Errorf("failed to load SSH configuration: %w", err)
}
// Initialize SSH client config
clientConfig, err := createSSHClientConfig(sshConfig)
if err != nil {
return fmt.Errorf("failed to create SSH client config: %w", err)
}
// Create a new SCP client
client := scp.NewClient(fmt.Sprintf("%s:%s", sshHostName, sshPort), &clientConfig)
client := scp.NewClient(fmt.Sprintf("%s:%s", sshConfig.hostName, sshConfig.port), &clientConfig)
// Connect to the remote server
err = client.Connect()
if err != nil {
return errors.New("couldn't establish a connection to the remote server")
return errors.New("Couldn't establish a connection to the remote server\n")
}
// Open a file
file, _ := os.Open(filepath.Join(tmpPath, fileName))
// Close client connection after the file has been copied
// Open the local file
filePath := filepath.Join(tmpPath, fileName)
file, err := os.Open(filePath)
if err != nil {
return fmt.Errorf("failed to open file %s: %w", filePath, err)
}
defer client.Close()
// Close the file after it has been copied
defer file.Close()
// the context can be adjusted to provide time-outs or inherit from other contexts if this is embedded in a larger application.
// Copy file to the remote server
err = client.CopyFromFile(context.Background(), *file, filepath.Join(remotePath, fileName), "0655")
if err != nil {
fmt.Println("Error while copying file ")
return err
return fmt.Errorf("failed to copy file to remote server: %w", err)
}
return nil
}
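Note: with this refactor, both SCP helpers surface configuration and copy failures as wrapped errors instead of exiting the process. An illustrative call site (which would live inside package pkg, where "fmt" is already imported) might look like the sketch below; the function name, file name, and remote path are example values, not taken from the PR.

```go
// uploadBackup is a hypothetical caller showing how errors now propagate.
func uploadBackup() error {
	if err := CopyToRemote("backup_20241002.sql.gz", "/home/jkaninda/backups"); err != nil {
		return fmt.Errorf("scp upload failed: %w", err)
	}
	return nil
}
```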
func CopyFromRemote(fileName, remotePath string) error {
sshUser := os.Getenv("SSH_USER")
sshPassword := os.Getenv("SSH_PASSWORD")
sshHostName := os.Getenv("SSH_HOST_NAME")
sshPort := os.Getenv("SSH_PORT")
sshIdentifyFile := os.Getenv("SSH_IDENTIFY_FILE")
err := utils.CheckEnvVars(sshVars)
// Load environment variables
sshConfig, err := loadSSHConfig()
if err != nil {
utils.Fatal("Error checking environment variables\n: %s", err)
return fmt.Errorf("failed to load SSH configuration: %w", err)
}
clientConfig, _ := auth.PasswordKey(sshUser, sshPassword, ssh.InsecureIgnoreHostKey())
if sshIdentifyFile != "" && utils.FileExists(sshIdentifyFile) {
clientConfig, _ = auth.PrivateKey(sshUser, sshIdentifyFile, ssh.InsecureIgnoreHostKey())
} else {
if sshPassword == "" {
return errors.New("SSH_PASSWORD environment variable is required if SSH_IDENTIFY_FILE is empty\n")
// Initialize SSH client config
clientConfig, err := createSSHClientConfig(sshConfig)
if err != nil {
return fmt.Errorf("failed to create SSH client config: %w", err)
}
utils.Warn("Accessing the remote server using password, password is not recommended\n")
clientConfig, _ = auth.PasswordKey(sshUser, sshPassword, ssh.InsecureIgnoreHostKey())
}
// Create a new SCP client
client := scp.NewClient(fmt.Sprintf("%s:%s", sshHostName, sshPort), &clientConfig)
client := scp.NewClient(fmt.Sprintf("%s:%s", sshConfig.hostName, sshConfig.port), &clientConfig)
// Connect to the remote server
err = client.Connect()
@@ -107,7 +99,6 @@ func CopyFromRemote(fileName, remotePath string) error {
}
defer file.Close()
// the context can be adjusted to provide time-outs or inherit from other contexts if this is embedded in a larger application.
err = client.CopyFromRemote(context.Background(), file, filepath.Join(remotePath, fileName))
if err != nil {