feat: add database migration from a source to a target database

This commit is contained in:
2024-08-29 21:49:35 +02:00
parent 99ae6a5b9d
commit 00ca15e94f
13 changed files with 298 additions and 121 deletions

View File

@@ -21,6 +21,8 @@ var BackupCmd = &cobra.Command{
func init() {
//Backup
BackupCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
BackupCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Execution mode. default or scheduled")
BackupCmd.PersistentFlags().StringP("period", "", "0 1 * * *", "Schedule period time")
BackupCmd.PersistentFlags().BoolP("prune", "", false, "Delete old backup, default disabled")

21
cmd/migrate.go Normal file
View File

@@ -0,0 +1,21 @@
package cmd
import (
"github.com/jkaninda/pg-bkup/pkg"
"github.com/jkaninda/pg-bkup/utils"
"github.com/spf13/cobra"
)
// MigrateCmd is the `migrate` sub-command. It moves a database from a
// source database (SOURCE_DB_* environment variables) to a target
// database (DB_* environment variables) in a single operation.
var MigrateCmd = &cobra.Command{
	Use:   "migrate",
	Short: "Migrate database from a source database to a target database",
	Run: func(cmd *cobra.Command, args []string) {
		// The command takes no positional arguments.
		if len(args) != 0 {
			utils.Fatal("Error, no argument required")
		} else {
			pkg.StartMigration(cmd)
		}
	},
}

View File

@@ -24,5 +24,7 @@ var RestoreCmd = &cobra.Command{
func init() {
//Restore
RestoreCmd.PersistentFlags().StringP("file", "f", "", "File name of database")
RestoreCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
RestoreCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
}

View File

@@ -18,7 +18,6 @@ var rootCmd = &cobra.Command{
Example: utils.MainExample,
Version: appVersion,
}
var operation = ""
// Execute adds all child commands to the root command and sets flags appropriately.
// This is called by main.main(). It only needs to happen once to the rootCmd.
@@ -30,12 +29,10 @@ func Execute() {
}
func init() {
rootCmd.PersistentFlags().StringP("storage", "s", "local", "Storage. local or s3")
rootCmd.PersistentFlags().StringP("path", "P", "", "AWS S3 path without file name. eg: /custom_path or ssh remote path `/home/foo/backup`")
rootCmd.PersistentFlags().StringP("dbname", "d", "", "Database name")
rootCmd.PersistentFlags().IntP("port", "p", 5432, "Database port")
rootCmd.PersistentFlags().StringVarP(&operation, "operation", "o", "", "Set operation, for old version only")
rootCmd.AddCommand(VersionCmd)
rootCmd.AddCommand(BackupCmd)
rootCmd.AddCommand(RestoreCmd)
rootCmd.AddCommand(MigrateCmd)
}

View File

@@ -14,7 +14,7 @@ ENV DB_HOST=""
ENV DB_NAME=""
ENV DB_USERNAME=""
ENV DB_PASSWORD=""
ENV DB_PORT="5432"
ENV DB_PORT=5432
ENV STORAGE=local
ENV AWS_S3_ENDPOINT=""
ENV AWS_S3_BUCKET_NAME=""
@@ -30,8 +30,13 @@ ENV SSH_PASSWORD=""
ENV SSH_HOST_NAME=""
ENV SSH_IDENTIFY_FILE=""
ENV SSH_PORT="22"
ENV SOURCE_DB_HOST=""
ENV SOURCE_DB_PORT=5432
ENV SOURCE_DB_NAME=""
ENV SOURCE_DB_USERNAME=""
ENV SOURCE_DB_PASSWORD=""
ARG DEBIAN_FRONTEND=noninteractive
ENV VERSION="v1.2.2"
ENV VERSION="v1.2.3"
ENV BACKUP_CRON_EXPRESSION=""
ENV GNUPGHOME="/tmp/gnupg"
ARG WORKDIR="/app"

75
docs/how-tos/migrate.md Normal file
View File

@@ -0,0 +1,75 @@
---
title: Migrate database
layout: default
parent: How Tos
nav_order: 9
---
# Migrate database
To migrate a database, use the `migrate` command.
{: .note }
pg-bkup has another great feature: migrating your database from a source database to another.
Normally, moving a database from a source to a target requires two operations: first back up the source database, then restore that backup into the target database.
Instead, you can use the integrated `migrate` feature, which performs the whole migration in a single operation.
### Docker compose
```yml
services:
pg-bkup:
# In production, it is advised to lock your image tag to a proper
# release version instead of using `latest`.
# Check https://github.com/jkaninda/pg-bkup/releases
# for a list of available releases.
image: jkaninda/pg-bkup
container_name: pg-bkup
command: migrate
volumes:
- ./backup:/backup
environment:
## Target database
- DB_PORT=5432
- DB_HOST=postgres
- DB_NAME=database
- DB_USERNAME=username
- DB_PASSWORD=password
## Source database
- SOURCE_DB_HOST=postgres
- SOURCE_DB_PORT=5432
- SOURCE_DB_NAME=sourcedb
- SOURCE_DB_USERNAME=jonas
- SOURCE_DB_PASSWORD=password
# pg-bkup container must be connected to the same network with your database
networks:
- web
networks:
web:
```
### Migrate database using Docker CLI
```env
## Target database
DB_PORT=5432
DB_HOST=postgres
DB_NAME=targetdb
DB_USERNAME=targetuser
DB_PASSWORD=password
## Source database
SOURCE_DB_HOST=postgres
SOURCE_DB_PORT=5432
SOURCE_DB_NAME=sourcedb
SOURCE_DB_USERNAME=sourceuser
SOURCE_DB_PASSWORD=password
```
```shell
docker run --rm --network your_network_name \
--env-file your-env \
-v $PWD/backup:/backup/ \
jkaninda/pg-bkup migrate -d database_name
```

View File

@@ -17,11 +17,10 @@ import (
)
func StartBackup(cmd *cobra.Command) {
_, _ = cmd.Flags().GetString("operation")
//Set env
utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "dbname", "DB_NAME")
utils.GetEnv(cmd, "port", "DB_PORT")
//utils.GetEnv(cmd, "dbname", "DB_NAME")
//utils.GetEnv(cmd, "port", "DB_PORT")
utils.GetEnv(cmd, "period", "BACKUP_CRON_EXPRESSION")
//Get flag value and set env
@@ -32,37 +31,38 @@ func StartBackup(cmd *cobra.Command) {
prune, _ := cmd.Flags().GetBool("prune")
disableCompression, _ = cmd.Flags().GetBool("disable-compression")
executionMode, _ = cmd.Flags().GetString("mode")
dbName = os.Getenv("DB_NAME")
gpgPassphrase := os.Getenv("GPG_PASSPHRASE")
_ = utils.GetEnv(cmd, "path", "AWS_S3_PATH")
dbConf = getDbConfig(cmd)
//
if gpgPassphrase != "" {
encryption = true
}
//Generate file name
backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbName, time.Now().Format("20060102_150405"))
backupFileName := fmt.Sprintf("%s_%s.sql.gz", dbConf.dbName, time.Now().Format("20060102_150405"))
if disableCompression {
backupFileName = fmt.Sprintf("%s_%s.sql", dbName, time.Now().Format("20060102_150405"))
backupFileName = fmt.Sprintf("%s_%s.sql", dbConf.dbName, time.Now().Format("20060102_150405"))
}
if executionMode == "default" {
switch storage {
case "s3":
s3Backup(backupFileName, disableCompression, prune, backupRetention, encryption)
s3Backup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
case "local":
localBackup(backupFileName, disableCompression, prune, backupRetention, encryption)
localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
case "ssh", "remote":
sshBackup(backupFileName, remotePath, disableCompression, prune, backupRetention, encryption)
sshBackup(dbConf, backupFileName, remotePath, disableCompression, prune, backupRetention, encryption)
case "ftp":
utils.Fatal("Not supported storage type: %s", storage)
default:
localBackup(backupFileName, disableCompression, prune, backupRetention, encryption)
localBackup(dbConf, backupFileName, disableCompression, prune, backupRetention, encryption)
}
} else if executionMode == "scheduled" {
scheduledMode(storage)
scheduledMode(dbConf, storage)
} else {
utils.Fatal("Error, unknown execution mode!")
}
@@ -70,7 +70,7 @@ func StartBackup(cmd *cobra.Command) {
}
// Run in scheduled mode
func scheduledMode(storage string) {
func scheduledMode(db *dbConfig, storage string) {
fmt.Println()
fmt.Println("**********************************")
@@ -81,7 +81,7 @@ func scheduledMode(storage string) {
utils.Info("Storage type %s ", storage)
//Test database connexion
utils.TestDatabaseConnection()
testDatabaseConnection(db)
utils.Info("Creating backup job...")
CreateCrontabScript(disableCompression, storage)
@@ -120,27 +120,17 @@ func scheduledMode(storage string) {
}
// BackupDatabase backup database
func BackupDatabase(backupFileName string, disableCompression bool) {
dbHost = os.Getenv("DB_HOST")
dbPassword = os.Getenv("DB_PASSWORD")
dbUserName = os.Getenv("DB_USERNAME")
dbName = os.Getenv("DB_NAME")
dbPort = os.Getenv("DB_PORT")
func BackupDatabase(db *dbConfig, backupFileName string, disableCompression bool) {
storagePath = os.Getenv("STORAGE_PATH")
utils.Info("Starting database backup...")
err := utils.CheckEnvVars(dbHVars)
if err != nil {
utils.Error("Please make sure all required environment variables for database are set")
utils.Fatal("Error checking environment variables: %s", err)
}
err = os.Setenv("PGPASSWORD", dbPassword)
err := os.Setenv("PGPASSWORD", db.dbPassword)
if err != nil {
return
}
utils.TestDatabaseConnection()
testDatabaseConnection(db)
// Backup Database database
utils.Info("Backing up database...")
@@ -148,10 +138,10 @@ func BackupDatabase(backupFileName string, disableCompression bool) {
if disableCompression {
// Execute pg_dump
cmd := exec.Command("pg_dump",
"-h", dbHost,
"-p", dbPort,
"-U", dbUserName,
"-d", dbName,
"-h", db.dbHost,
"-p", db.dbPort,
"-U", db.dbUserName,
"-d", db.dbName,
)
output, err := cmd.Output()
if err != nil {
@@ -172,10 +162,10 @@ func BackupDatabase(backupFileName string, disableCompression bool) {
} else {
// Execute pg_dump
cmd := exec.Command("pg_dump",
"-h", dbHost,
"-p", dbPort,
"-U", dbUserName,
"-d", dbName,
"-h", db.dbHost,
"-p", db.dbPort,
"-U", db.dbUserName,
"-d", db.dbName,
)
stdout, err := cmd.StdoutPipe()
if err != nil {
@@ -200,9 +190,9 @@ func BackupDatabase(backupFileName string, disableCompression bool) {
utils.Info("Database has been backed up")
}
func localBackup(backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
func localBackup(db *dbConfig, backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
utils.Info("Backup database to local storage")
BackupDatabase(backupFileName, disableCompression)
BackupDatabase(db, backupFileName, disableCompression)
finalFileName := backupFileName
if encrypt {
encryptBackup(backupFileName)
@@ -218,12 +208,12 @@ func localBackup(backupFileName string, disableCompression bool, prune bool, bac
deleteTemp()
}
func s3Backup(backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
func s3Backup(db *dbConfig, backupFileName string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
s3Path := utils.GetEnvVariable("AWS_S3_PATH", "S3_PATH")
utils.Info("Backup database to s3 storage")
//Backup database
BackupDatabase(backupFileName, disableCompression)
BackupDatabase(db, backupFileName, disableCompression)
finalFileName := backupFileName
if encrypt {
encryptBackup(backupFileName)
@@ -255,10 +245,10 @@ func s3Backup(backupFileName string, disableCompression bool, prune bool, backup
//Delete temp
deleteTemp()
}
func sshBackup(backupFileName, remotePath string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
func sshBackup(db *dbConfig, backupFileName, remotePath string, disableCompression bool, prune bool, backupRetention int, encrypt bool) {
utils.Info("Backup database to Remote server")
//Backup database
BackupDatabase(backupFileName, disableCompression)
BackupDatabase(db, backupFileName, disableCompression)
finalFileName := backupFileName
if encrypt {
encryptBackup(backupFileName)

View File

@@ -1,4 +1,59 @@
package pkg
import (
"github.com/jkaninda/pg-bkup/utils"
"github.com/spf13/cobra"
"os"
)
// Config is a placeholder for future global application configuration.
type Config struct {
}
// dbConfig holds the connection settings for the target (or generic)
// database, populated from the DB_* environment variables.
type dbConfig struct {
dbHost string
dbPort string
dbName string
dbUserName string
dbPassword string
}
// dbSourceConfig holds the connection settings for the migration source
// database, populated from the SOURCE_DB_* environment variables.
type dbSourceConfig struct {
sourceDbHost string
sourceDbPort string
sourceDbUserName string
sourceDbPassword string
sourceDbName string
}
// getDbConfig builds the target database configuration from the
// `dbname`/`port` command flags and the DB_* environment variables.
// It terminates the program when a required variable is missing.
func getDbConfig(cmd *cobra.Command) *dbConfig {
	// Propagate flag values into the environment first so that flags
	// override (or fill in) the corresponding variables.
	utils.GetEnv(cmd, "dbname", "DB_NAME")
	utils.GetEnv(cmd, "port", "DB_PORT")
	conf := &dbConfig{
		dbHost:     os.Getenv("DB_HOST"),
		dbPort:     os.Getenv("DB_PORT"),
		dbName:     os.Getenv("DB_NAME"),
		dbUserName: os.Getenv("DB_USERNAME"),
		dbPassword: os.Getenv("DB_PASSWORD"),
	}
	if err := utils.CheckEnvVars(dbHVars); err != nil {
		utils.Error("Please make sure all required environment variables for database are set")
		utils.Fatal("Error checking environment variables: %s", err)
	}
	return conf
}
// getSourceDbConfig builds the migration-source database configuration
// from the SOURCE_DB_* environment variables. It terminates the program
// when a required variable is missing.
func getSourceDbConfig() *dbSourceConfig {
	conf := &dbSourceConfig{
		sourceDbHost:     os.Getenv("SOURCE_DB_HOST"),
		sourceDbPort:     os.Getenv("SOURCE_DB_PORT"),
		sourceDbName:     os.Getenv("SOURCE_DB_NAME"),
		sourceDbUserName: os.Getenv("SOURCE_DB_USERNAME"),
		sourceDbPassword: os.Getenv("SOURCE_DB_PASSWORD"),
	}
	if err := utils.CheckEnvVars(sdbRVars); err != nil {
		utils.Error("Please make sure all required environment variables for source database are set")
		utils.Fatal("Error checking environment variables: %s", err)
	}
	return conf
}

View File

@@ -1,8 +1,10 @@
package pkg
import (
"bytes"
"github.com/jkaninda/pg-bkup/utils"
"os"
"os/exec"
"path/filepath"
"time"
)
@@ -94,3 +96,38 @@ func deleteTemp() {
utils.Info("Deleting %s ... done", tmpPath)
}
}
// testDatabaseConnection verifies that the given database is reachable by
// executing a trivial "SELECT version();" query through psql.
// Failures are reported via utils.Error but do not abort the program,
// matching the previous best-effort behavior.
func testDatabaseConnection(db *dbConfig) {
	utils.Info("Connecting to %s database ...", db.dbName)
	// psql reads the password from the PGPASSWORD environment variable.
	// Previously a Setenv failure returned silently; report it instead.
	if err := os.Setenv("PGPASSWORD", db.dbPassword); err != nil {
		utils.Error("Error setting PGPASSWORD environment variable: %v", err)
		return
	}
	// Test database connection
	query := "SELECT version();"
	// Prepare the psql command
	cmd := exec.Command("psql",
		"-U", db.dbUserName, // database user
		"-d", db.dbName, // database name
		"-h", db.dbHost, // host
		"-p", db.dbPort, // port
		"-c", query, // SQL command to execute
	)
	// Capture combined stdout/stderr so failures include psql's output.
	var out bytes.Buffer
	cmd.Stdout = &out
	cmd.Stderr = &out
	// Run the command and capture any errors
	if err := cmd.Run(); err != nil {
		utils.Error("Error running psql command: %v\nOutput: %s\n", err, out.String())
		return
	}
	utils.Info("Successfully connected to %s database", db.dbName)
}

31
pkg/migrate.go Normal file
View File

@@ -0,0 +1,31 @@
package pkg
import (
"fmt"
"github.com/jkaninda/pg-bkup/utils"
"github.com/spf13/cobra"
"time"
)
// StartMigration performs a one-shot database migration: it dumps the
// source database (SOURCE_DB_* variables) to a plain SQL file and then
// restores that dump into the target database (DB_* variables / flags).
func StartMigration(cmd *cobra.Command) {
	utils.Info("Starting database migration...")
	// Resolve target and source connection settings.
	dbConf = getDbConfig(cmd)
	sDbConf = getSourceDbConfig()
	// Timestamped dump file named after the source database.
	backupFileName := fmt.Sprintf("%s_%s.sql", sDbConf.sourceDbName, time.Now().Format("20060102_150405"))
	// Dump the source database; compression is disabled (plain .sql).
	sourceConf := dbConfig{
		dbHost:     sDbConf.sourceDbHost,
		dbPort:     sDbConf.sourceDbPort,
		dbName:     sDbConf.sourceDbName,
		dbUserName: sDbConf.sourceDbUserName,
		dbPassword: sDbConf.sourceDbPassword,
	}
	BackupDatabase(&sourceConf, backupFileName, true)
	// Replay the dump into the target database.
	utils.Info("Restoring [%s] database into [%s] database...", sDbConf.sourceDbName, dbConf.dbName)
	RestoreDatabase(dbConf, backupFileName)
	utils.Info("[%s] database has been restored into [%s] database", sDbConf.sourceDbName, dbConf.dbName)
	utils.Info("Database migration completed!")
}

View File

@@ -13,8 +13,6 @@ func StartRestore(cmd *cobra.Command) {
//Set env
utils.SetEnv("STORAGE_PATH", storagePath)
utils.GetEnv(cmd, "dbname", "DB_NAME")
utils.GetEnv(cmd, "port", "DB_PORT")
//Get flag value and set env
s3Path := utils.GetEnv(cmd, "path", "AWS_S3_PATH")
@@ -23,47 +21,46 @@ func StartRestore(cmd *cobra.Command) {
file = utils.GetEnv(cmd, "file", "FILE_NAME")
executionMode, _ = cmd.Flags().GetString("mode")
bucket := utils.GetEnvVariable("AWS_S3_BUCKET_NAME", "BUCKET_NAME")
dbConf = getDbConfig(cmd)
switch storage {
case "s3":
restoreFromS3(file, bucket, s3Path)
restoreFromS3(dbConf, file, bucket, s3Path)
case "local":
utils.Info("Restore database from local")
copyToTmp(storagePath, file)
RestoreDatabase(file)
RestoreDatabase(dbConf, file)
case "ssh":
restoreFromRemote(file, remotePath)
restoreFromRemote(dbConf, file, remotePath)
case "ftp":
utils.Fatal("Restore from FTP is not yet supported")
default:
utils.Info("Restore database from local")
RestoreDatabase(file)
copyToTmp(storagePath, file)
RestoreDatabase(dbConf, file)
}
}
func restoreFromS3(file, bucket, s3Path string) {
func restoreFromS3(db *dbConfig, file, bucket, s3Path string) {
utils.Info("Restore database from s3")
err := utils.DownloadFile(tmpPath, file, bucket, s3Path)
if err != nil {
utils.Fatal("Error download file from s3 %s %v ", file, err)
}
RestoreDatabase(file)
RestoreDatabase(db, file)
}
func restoreFromRemote(file, remotePath string) {
func restoreFromRemote(db *dbConfig, file, remotePath string) {
utils.Info("Restore database from remote server")
err := CopyFromRemote(file, remotePath)
if err != nil {
utils.Fatal("Error download file from remote server: %s %v", filepath.Join(remotePath, file), err)
}
RestoreDatabase(file)
RestoreDatabase(db, file)
}
// RestoreDatabase restore database
func RestoreDatabase(file string) {
dbHost = os.Getenv("DB_HOST")
dbPassword = os.Getenv("DB_PASSWORD")
dbUserName = os.Getenv("DB_USERNAME")
dbName = os.Getenv("DB_NAME")
dbPort = os.Getenv("DB_PORT")
func RestoreDatabase(db *dbConfig, file string) {
gpgPassphrase := os.Getenv("GPG_PASSPHRASE")
if file == "" {
utils.Fatal("Error, file required")
@@ -93,17 +90,17 @@ func RestoreDatabase(file string) {
if utils.FileExists(fmt.Sprintf("%s/%s", tmpPath, file)) {
err := os.Setenv("PGPASSWORD", dbPassword)
err := os.Setenv("PGPASSWORD", db.dbPassword)
if err != nil {
return
}
utils.TestDatabaseConnection()
testDatabaseConnection(db)
utils.Info("Restoring database...")
extension := filepath.Ext(fmt.Sprintf("%s/%s", tmpPath, file))
// Restore from compressed file / .sql.gz
if extension == ".gz" {
str := "zcat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + os.Getenv("DB_HOST") + " -p " + os.Getenv("DB_PORT") + " -U " + os.Getenv("DB_USERNAME") + " -v -d " + os.Getenv("DB_NAME")
str := "zcat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName
_, err := exec.Command("bash", "-c", str).Output()
if err != nil {
utils.Fatal("Error, in restoring the database %v", err)
@@ -115,7 +112,7 @@ func RestoreDatabase(file string) {
} else if extension == ".sql" {
//Restore from sql file
str := "cat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + os.Getenv("DB_HOST") + " -p " + os.Getenv("DB_PORT") + " -U " + os.Getenv("DB_USERNAME") + " -v -d " + os.Getenv("DB_NAME")
str := "cat " + fmt.Sprintf("%s/%s", tmpPath, file) + " | psql -h " + db.dbHost + " -p " + db.dbPort + " -U " + db.dbUserName + " -v -d " + db.dbName
_, err := exec.Command("bash", "-c", str).Output()
if err != nil {
utils.Fatal("Error in restoring the database %v", err)

View File

@@ -7,13 +7,13 @@ const algorithm = "aes256"
const gpgExtension = "gpg"
var (
storage = "local"
file = ""
dbPassword = ""
dbUserName = ""
dbName = ""
dbHost = ""
dbPort = "5432"
storage = "local"
file = ""
//dbPassword = ""
//dbUserName = ""
//dbName = ""
//dbHost = ""
//dbPort = "5432"
executionMode = "default"
storagePath = "/backup"
disableCompression = false
@@ -27,6 +27,16 @@ var dbHVars = []string{
"DB_USERNAME",
"DB_NAME",
}
var sdbRVars = []string{
"SOURCE_DB_HOST",
"SOURCE_DB_PORT",
"SOURCE_DB_NAME",
"SOURCE_DB_USERNAME",
"SOURCE_DB_PASSWORD",
}
var dbConf *dbConfig
var sDbConf *dbSourceConfig
// sshVars Required environment variables for SSH remote server storage
var sshVars = []string{

View File

@@ -1,19 +1,17 @@
package utils
/*****
* MySQL Backup & Restore
* PostgreSQL Backup & Restore
* @author Jonas Kaninda
* @license MIT License <https://opensource.org/licenses/MIT>
* @link https://github.com/jkaninda/mysql-bkup
* @link https://github.com/jkaninda/pg-bkup
**/
import (
"bytes"
"fmt"
"github.com/spf13/cobra"
"io"
"io/fs"
"os"
"os/exec"
)
func FileExists(filename string) bool {
@@ -90,49 +88,6 @@ func IsDirEmpty(name string) (bool, error) {
return true, nil
}
// TestDatabaseConnection tests the database connection
func TestDatabaseConnection() {
dbHost := os.Getenv("DB_HOST")
dbPassword := os.Getenv("DB_PASSWORD")
dbUserName := os.Getenv("DB_USERNAME")
dbName := os.Getenv("DB_NAME")
dbPort := os.Getenv("DB_PORT")
if os.Getenv("DB_HOST") == "" || os.Getenv("DB_NAME") == "" || os.Getenv("DB_USERNAME") == "" || os.Getenv("DB_PASSWORD") == "" {
Fatal("Please make sure all required database environment variables are set")
} else {
Info("Connecting to database ...")
// Test database connection
query := "SELECT version();"
// Set the environment variable for the database password
err := os.Setenv("PGPASSWORD", dbPassword)
if err != nil {
return
}
// Prepare the psql command
cmd := exec.Command("psql",
"-U", dbUserName, // database user
"-d", dbName, // database name
"-h", dbHost, // host
"-p", dbPort, // port
"-c", query, // SQL command to execute
)
// Capture the output
var out bytes.Buffer
cmd.Stdout = &out
cmd.Stderr = &out
// Run the command and capture any errors
err = cmd.Run()
if err != nil {
Error("Error running psql command: %v\nOutput: %s\n", err, out.String())
return
}
Info("Successfully connected to database")
}
}
func GetEnv(cmd *cobra.Command, flagName, envName string) string {
value, _ := cmd.Flags().GetString(flagName)
if value != "" {