Compare commits

12 commits (v0.5...v0.7)

SHA1        Date                        Message
9588c5bcee  2024-02-17 18:25:37 +01:00  Merge pull request #49 from jkaninda/develop (Develop)
e0457a4ed8  2024-02-17 18:24:45 +01:00  refactor: clean up code
d3efa3fc05  2024-02-17 18:23:27 +01:00  chore: update docker tag version
7bcde78136  2024-02-17 18:17:41 +01:00  feat: add backup prune, to delete old backup
b95601ab57  2024-01-22 21:19:44 +01:00  Merge pull request #48 from jkaninda/develop (chore: rename env variable in Dockerfile)
facd57e2cd  2024-01-22 21:18:51 +01:00  chore: rename env variable in Dockerfile
2fa7e50485  2024-01-21 15:27:14 +01:00  Merge pull request #47 from jkaninda/develop (docs: refactoring of code, update doc)
f53c68cd5c  2024-01-21 15:26:20 +01:00  New release
902695032c  2024-01-21 15:24:44 +01:00  docs: add more details for env variables
620801cb99  2024-01-21 15:18:35 +01:00  refactor: refactoring of code, update docs
e19643ebcb  2024-01-20 14:34:36 +01:00  Merge pull request #46 from jkaninda/refactor (docs: update docs)
c87201d08d  2024-01-20 14:33:58 +01:00  docs: update docs
17 changed files with 147 additions and 91 deletions

@@ -35,5 +35,5 @@ jobs:
           file: "./docker/Dockerfile"
           platforms: linux/amd64,linux/arm64
           tags: |
-            "${{env.BUILDKIT_IMAGE}}:v0.5"
+            "${{env.BUILDKIT_IMAGE}}:v0.7"
             "${{env.BUILDKIT_IMAGE}}:latest"

.gitignore

@@ -7,4 +7,5 @@ test.md
 .DS_Store
 mysql-bkup
 /.DS_Store
-/.idea
+/.idea
+bin

Makefile

@@ -0,0 +1,32 @@
+BINARY_NAME=mysql-bkup
+include .env
+export
+run:
+	go run .
+build:
+	go build -o bin/${BINARY_NAME} .
+compile:
+	GOOS=darwin GOARCH=arm64 go build -o bin/${BINARY_NAME}-darwin-arm64 .
+	GOOS=darwin GOARCH=amd64 go build -o bin/${BINARY_NAME}-darwin-amd64 .
+	GOOS=linux GOARCH=arm64 go build -o bin/${BINARY_NAME}-linux-arm64 .
+	GOOS=linux GOARCH=amd64 go build -o bin/${BINARY_NAME}-linux-amd64 .
+docker-build:
+	docker build -f docker/Dockerfile -t jkaninda/mysql-bkup:latest .
+docker-run: docker-build
+	docker run --rm --network internal --privileged --device /dev/fuse --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" jkaninda/mysql-bkup bkup backup --prune --keep-last 2
+docker-run-scheduled: docker-build
+	docker run --rm --network internal --privileged --device /dev/fuse --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -v "./backup:/backup" jkaninda/mysql-bkup bkup backup --prune --keep-last=2 --mode scheduled --period "* * * * *"
+docker-run-scheduled-s3: docker-build
+	docker run --rm --network internal --privileged --device /dev/fuse --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" jkaninda/mysql-bkup bkup backup --storage s3 --mode scheduled --path /custom-path --period "* * * * *"
+docker-restore-s3: docker-build
+	docker run --rm --network internal --privileged --device /dev/fuse --name mysql-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "FILE_NAME=${FILE_NAME}" jkaninda/mysql-bkup bkup restore --storage s3 --path /custom-path

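Note that the new Makefile starts with `include .env`, so the docker-run* targets expect a local .env file supplying the variables they reference. A minimal sketch (all values are placeholders; only the variable names are taken from the targets above):

```sh
# .env -- loaded by `include .env` / `export` in the Makefile; values are placeholders
DB_HOST=mysqlhost
DB_NAME=testdb
DB_USERNAME=username
DB_PASSWORD=password
# Needed only by the *-s3 targets
ACCESS_KEY=your_access_key
SECRET_KEY=your_secret_key
BUCKET_NAME=your_bucket_name
S3_ENDPOINT=https://s3.us-west-2.amazonaws.com
# Needed only by docker-restore-s3
FILE_NAME=db_20231219_022941.sql.gz
```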
@@ -38,23 +38,41 @@ MySQL Backup and Restoration tool. Backup database to AWS S3 storage or any S3 A
 ## Usage
-| Options | Shorts | Usage |
-|-----------------------|--------|--------------------------------------------------------------------|
-| mysql-bkup | bkup | CLI utility |
-| backup | | Backup database operation |
-| restore | | Restore database operation |
-| history | | Show the history of backup |
-| --storage | -s | Set storage. local or s3 (default: local) |
-| --file | -f | Set file name for restoration |
-| --path | | Set s3 path without file name. eg: /custom_path |
-| --dbname | -d | Set database name |
-| --port | -p | Set database port (default: 3306) |
-| --mode | -m | Set execution mode. default or scheduled (default: default) |
-| --disable-compression | | Disable database backup compression |
-| --period | | Set crontab period for scheduled mode only. (default: "0 1 * * *") |
-| --timeout | -t | Set timeout (default: 60s) |
-| --help | -h | Print this help message and exit |
-| --version | -V | Print version information and exit |
+| Options | Shorts | Usage |
+|-----------------------|--------|-----------------------------------------------------------------------|
+| mysql-bkup | bkup | CLI utility |
+| backup | | Backup database operation |
+| restore | | Restore database operation |
+| history | | Show the history of backup |
+| --storage | -s | Set storage. local or s3 (default: local) |
+| --file | -f | Set file name for restoration |
+| --path | | Set s3 path without file name. eg: /custom_path |
+| --dbname | -d | Set database name |
+| --port | -p | Set database port (default: 3306) |
+| --mode | -m | Set execution mode. default or scheduled (default: default) |
+| --disable-compression | | Disable database backup compression |
+| --prune | | Delete old backup |
+| --keep-last | | keep all backup and delete within this time interval, default 7 days |
+| --period | | Set crontab period for scheduled mode only. (default: "0 1 * * *") |
+| --timeout | -t | Set timeout (default: 60s) |
+| --help | -h | Print this help message and exit |
+| --version | -V | Print version information and exit |
+## Environment variables
+| Name | Requirement | Description |
+|-------------|--------------------------------------------------|----------------------|
+| DB_PORT | Optional, default 3306 | Database port number |
+| DB_HOST | Required | Database host |
+| DB_NAME | Optional if it was provided from the -d flag | Database name |
+| DB_USERNAME | Required | Database user name |
+| DB_PASSWORD | Required | Database password |
+| ACCESS_KEY | Optional, required for S3 storage | AWS S3 Access Key |
+| SECRET_KEY | Optional, required for S3 storage | AWS S3 Secret Key |
+| BUCKET_NAME | Optional, required for S3 storage | AWS S3 Bucket Name |
+| S3_ENDPOINT | Optional, required for S3 storage | AWS S3 Endpoint |
+| FILE_NAME | Optional if it was provided from the --file flag | File to restore |
 ## Note:
@@ -83,20 +101,20 @@ FLUSH PRIVILEGES;
 Simple backup usage
 ```sh
-bkup backup --dbname database_name
+mysql-bkup backup --dbname database_name
 ```
 ```sh
-bkup backup -d database_name
+mysql-bkup backup -d database_name
 ```
 ### S3
 ```sh
-bkup backup --storage s3 --dbname database_name
+mysql-bkup backup --storage s3 --dbname database_name
 ```
 ## Docker run:
 ```sh
-docker run --rm --network your_network_name --name mysql-bkup -v $PWD/backup:/backup/ -e "DB_HOST=database_host_name" -e "DB_USERNAME=username" -e "DB_PASSWORD=password" jkaninda/mysql-bkup:latest bkup backup -d database_name
+docker run --rm --network your_network_name --name mysql-bkup -v $PWD/backup:/backup/ -e "DB_HOST=database_host_name" -e "DB_USERNAME=username" -e "DB_PASSWORD=password" jkaninda/mysql-bkup:latest mysql-bkup backup -d database_name
 ```
 ## Docker compose file:
@@ -117,7 +135,7 @@ services:
     command:
       - /bin/sh
       - -c
-      - bkup backup -d database_name
+      - mysql-bkup backup -d database_name
     volumes:
       - ./backup:/backup
     environment:
@@ -131,22 +149,22 @@ services:
 Simple database restore operation usage
 ```sh
-bkup restore --dbname database_name --file database_20231217_115621.sql
+mysql-bkup restore --dbname database_name --file database_20231217_115621.sql
 ```
 ```sh
-bkup restore -f database_20231217_115621.sql
+mysql-bkup restore -f database_20231217_115621.sql
 ```
 ### S3
 ```sh
-bkup restore --storage s3 --file database_20231217_115621.sql
+mysql-bkup restore --storage s3 --file database_20231217_115621.sql
 ```
 ## Docker run:
 ```sh
-docker run --rm --network your_network_name --name mysql-bkup -v $PWD/backup:/backup/ -e "DB_HOST=database_host_name" -e "DB_USERNAME=username" -e "DB_PASSWORD=password" jkaninda/mysql-bkup bkup backup -d database_name -f db_20231219_022941.sql.gz
+docker run --rm --network your_network_name --name mysql-bkup -v $PWD/backup:/backup/ -e "DB_HOST=database_host_name" -e "DB_USERNAME=username" -e "DB_PASSWORD=password" jkaninda/mysql-bkup mysql-bkup backup -d database_name -f db_20231219_022941.sql.gz
 ```
 ## Docker compose file:
@@ -168,7 +186,7 @@ services:
     command:
       - /bin/sh
       - -c
-      - bkup restore --file database_20231217_115621.sql --dbname database_name
+      - mysql-bkup restore --file database_20231217_115621.sql --dbname database_name
     volumes:
      - ./backup:/backup
     environment:
@@ -187,7 +205,7 @@ docker-compose up -d
 ## Backup to S3
 ```sh
-docker run --rm --privileged --device /dev/fuse --name mysql-bkup -e "DB_HOST=db_hostname" -e "DB_USERNAME=username" -e "DB_PASSWORD=password" -e "ACCESS_KEY=your_access_key" -e "SECRET_KEY=your_secret_key" -e "BUCKETNAME=your_bucket_name" -e "S3_ENDPOINT=https://s3.us-west-2.amazonaws.com" jkaninda/mysql-bkup bkup backup -s s3 -d database_name
+docker run --rm --privileged --device /dev/fuse --name mysql-bkup -e "DB_HOST=db_hostname" -e "DB_USERNAME=username" -e "DB_PASSWORD=password" -e "ACCESS_KEY=your_access_key" -e "SECRET_KEY=your_secret_key" -e "BUCKETNAME=your_bucket_name" -e "S3_ENDPOINT=https://s3.us-west-2.amazonaws.com" jkaninda/mysql-bkup mysql-bkup backup -s s3 -d database_name
 ```
 > To change s3 backup path add this flag : --path /myPath . default path is /mysql_bkup
@@ -217,7 +235,7 @@ services:
       - DB_PASSWORD=password
       - ACCESS_KEY=${ACCESS_KEY}
       - SECRET_KEY=${SECRET_KEY}
-      - BUCKETNAME=${BUCKETNAME}
+      - BUCKET_NAME=${BUCKET_NAME}
       - S3_ENDPOINT=${S3_ENDPOINT}
 ```
@@ -275,7 +293,7 @@ Easy to remember format:
 > Docker run :
 ```sh
-docker run --rm --name mysql-bkup -v $BACKUP_DIR:/backup/ -e "DB_HOST=$DB_HOST" -e "DB_USERNAME=$DB_USERNAME" -e "DB_PASSWORD=$DB_PASSWORD" jkaninda/mysql-bkup bkup backup --dbname $DB_NAME --mode scheduled --period "0 1 * * *"
+docker run --rm --name mysql-bkup -v $BACKUP_DIR:/backup/ -e "DB_HOST=$DB_HOST" -e "DB_USERNAME=$DB_USERNAME" -e "DB_PASSWORD=$DB_PASSWORD" jkaninda/mysql-bkup mysql-bkup backup --dbname $DB_NAME --mode scheduled --period "0 1 * * *"
 ```
 > With Docker compose
@@ -292,7 +310,7 @@ services:
     command:
       - /bin/sh
       - -c
-      - bkup backup --storage s3 --path /mys3_custome_path --dbname database_name --mode scheduled --period "*/30 * * * *"
+      - mysql-bkup backup --storage s3 --path /mys3_custome_path --dbname database_name --mode scheduled --period "*/30 * * * *"
     environment:
       - DB_PORT=3306
       - DB_HOST=mysqlhost
@@ -300,7 +318,7 @@ services:
       - DB_PASSWORD=${DB_PASSWORD}
       - ACCESS_KEY=${ACCESS_KEY}
       - SECRET_KEY=${SECRET_KEY}
-      - BUCKETNAME=${BUCKETNAME}
+      - BUCKET_NAME=${BUCKET_NAME}
       - S3_ENDPOINT=${S3_ENDPOINT}
 ```
@@ -329,7 +347,7 @@ spec:
           command:
             - /bin/sh
             - -c
-            - bkup backup -s s3 --path /custom_path
+            - mysql-bkup backup -s s3 --path /custom_path
           env:
             - name: DB_PORT
               value: "3306"
@@ -346,7 +364,7 @@ spec:
               value: ""
             - name: SECRET_KEY
               value: ""
-            - name: BUCKETNAME
+            - name: BUCKET_NAME
               value: ""
            - name: S3_ENDPOINT
               value: "https://s3.us-west-2.amazonaws.com"

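Taken together, the README changes above document the new pruning workflow. A couple of illustrative invocations, assembled only from flags that appear in this changeset (the 7-day default comes from the keep-last flag definition further down):

```sh
# One-off backup that also deletes backups older than the 7-day default
mysql-bkup backup --dbname database_name --prune

# Scheduled daily backup keeping roughly the last 2 days, as in the Makefile targets
mysql-bkup backup --dbname database_name --prune --keep-last 2 --mode scheduled --period "0 1 * * *"
```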
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-if [ $# -eq 0 ]
-then
-  tag='latest'
-else
-  tag=$1
-fi
-#go build
-CGO_ENABLED=0 GOOS=linux go build
-docker build -f docker/Dockerfile -t jkaninda/mysql-bkup:$tag .
-#docker compose up -d --force-recreate

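This deleted build script appears to be superseded by the Makefile added in the same changeset; assuming that Makefile, the equivalent of the script's default invocation would be:

```sh
make docker-build   # runs: docker build -f docker/Dockerfile -t jkaninda/mysql-bkup:latest .
```

One difference worth noting: the script accepted a tag argument, while the docker-build target always tags latest.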
@@ -23,6 +23,8 @@ func init() {
 	//Backup
 	BackupCmd.PersistentFlags().StringP("mode", "m", "default", "Set execution mode. default or scheduled")
 	BackupCmd.PersistentFlags().StringP("period", "", "0 1 * * *", "Set schedule period time")
+	BackupCmd.PersistentFlags().BoolP("prune", "", false, "Prune old backup")
+	BackupCmd.PersistentFlags().IntP("keep-last", "", 7, "keep all backup and delete within this time interval, default 7 days")
 	BackupCmd.PersistentFlags().BoolP("disable-compression", "", false, "Disable backup compression")
 }

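Because prune and keep-last are registered as persistent flags on BackupCmd rather than on rootCmd, they are only accepted by the backup subcommand. For example:

```sh
mysql-bkup backup -d database_name --prune --keep-last 7   # valid: flags are defined for backup
mysql-bkup restore --prune                                 # should fail: restore defines no --prune flag
```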
@@ -5,7 +5,6 @@ Copyright © 2024 Jonas Kaninda <jonaskaninda@gmail.com>
 package cmd
 import (
-	"fmt"
 	"github.com/jkaninda/mysql-bkup/utils"
 	"github.com/spf13/cobra"
 	"os"
@@ -18,16 +17,6 @@ var rootCmd = &cobra.Command{
 	Long:    `MySQL Database backup and restoration tool. Backup database to AWS S3 storage or any S3 Alternatives for Object Storage.`,
 	Example: utils.MainExample,
 	Version: appVersion,
-	//TODO: To remove
-	//For old user || To remove
-	Run: func(cmd *cobra.Command, args []string) {
-		if operation != "" {
-			if operation == "backup" || operation == "restore" {
-				fmt.Println(utils.Notice)
-				utils.Fatal("New config required, please check --help")
-			}
-		}
-	},
 }
 var operation = ""
 var s3Path = "/mysql-bkup"
@@ -49,11 +38,6 @@ func init() {
 	rootCmd.PersistentFlags().IntP("port", "p", 3306, "Set database port")
-	rootCmd.PersistentFlags().StringVarP(&operation, "operation", "o", "", "Set operation, for old version only")
-	rootCmd.PersistentFlags().StringP("mode", "m", "default", "Set execution mode. default or scheduled")
-	rootCmd.PersistentFlags().StringP("period", "", "0 1 * * *", "Set schedule period time")
-	rootCmd.PersistentFlags().BoolP("disable-compression", "", false, "Disable backup compression")
-	rootCmd.PersistentFlags().StringP("file", "f", "", "File name of database")
 	rootCmd.AddCommand(VersionCmd)
 	rootCmd.AddCommand(BackupCmd)
 	rootCmd.AddCommand(RestoreCmd)

@@ -16,12 +16,12 @@ ENV DB_USERNAME=""
ENV DB_PASSWORD=""
ENV DB_PORT="3306"
ENV STORAGE=local
ENV BUCKETNAME=""
ENV BUCKET_NAME=""
ENV ACCESS_KEY=""
ENV SECRET_KEY=""
ENV S3_ENDPOINT=https://s3.amazonaws.com
ARG DEBIAN_FRONTEND=noninteractive
ENV VERSION="v0.5"
ENV VERSION="v0.6"
LABEL authors="Jonas Kaninda"
RUN apt-get update -qq

@@ -9,7 +9,7 @@ services:
     command:
       - /bin/sh
       - -c
-      - bkup backup --storage s3 --path /mys3_custom_path --dbname database_name
+      - mysql-bkup backup --storage s3 --path /mys3_custom_path --dbname database_name
     environment:
       - DB_PORT=3306
       - DB_HOST=mysqlhost
@@ -17,5 +17,5 @@ services:
       - DB_PASSWORD=${DB_PASSWORD}
       - ACCESS_KEY=${ACCESS_KEY}
       - SECRET_KEY=${SECRET_KEY}
-      - BUCKETNAME=${BUCKETNAME}
+      - BUCKET_NAME=${BUCKET_NAME}
       - S3_ENDPOINT=https://s3.us-west-2.amazonaws.com

@@ -6,7 +6,7 @@ services:
     command:
       - /bin/sh
       - -c
-      - bkup backup --dbname database_name --mode scheduled --period "0 1 * * *"
+      - mysql-bkup backup --dbname database_name --mode scheduled --period "0 1 * * *"
     volumes:
       - ./backup:/backup
     environment:

@@ -9,7 +9,7 @@ services:
     command:
       - /bin/sh
       - -c
-      - bkup backup --storage s3 --path /mys3_custom_path --dbname database_name --mode scheduled --period "0 1 * * *"
+      - mysql-bkup backup --storage s3 --path /mys3_custom_path --dbname database_name --mode scheduled --period "0 1 * * *"
     environment:
       - DB_PORT=3306
       - DB_HOST=mysqlhost
@@ -17,5 +17,5 @@ services:
       - DB_PASSWORD=${DB_PASSWORD}
       - ACCESS_KEY=${ACCESS_KEY}
       - SECRET_KEY=${SECRET_KEY}
-      - BUCKETNAME=${BUCKETNAME}
+      - BUCKET_NAME=${BUCKET_NAME}
       - S3_ENDPOINT=https://s3.us-west-2.amazonaws.com

@@ -6,7 +6,7 @@ services:
     command:
       - /bin/sh
       - -c
-      - bkup backup --dbname database_name
+      - mysql-bkup backup --dbname database_name
     volumes:
       - ./backup:/backup
     environment:

@@ -16,7 +16,7 @@ spec:
           command:
             - /bin/sh
            - -c
-            - bkup backup --storage s3 --path /custom_path
+            - mysql-bkup backup --storage s3 --path /custom_path
           env:
             - name: DB_PORT
               value: "3306"

@@ -11,12 +11,11 @@ import (
 	"log"
 	"os"
 	"os/exec"
 	"path/filepath"
 	"time"
 )
 func StartBackup(cmd *cobra.Command) {
-	_, _ = cmd.Flags().GetString("operation")
 	//Set env
 	utils.SetEnv("STORAGE_PATH", storagePath)
 	utils.GetEnv(cmd, "dbname", "DB_NAME")
@@ -28,15 +27,17 @@ func StartBackup(cmd *cobra.Command) {
 	storage = utils.GetEnv(cmd, "storage", "STORAGE")
 	file = utils.GetEnv(cmd, "file", "FILE_NAME")
 	disableCompression, _ = cmd.Flags().GetBool("disable-compression")
+	keepLast, _ := cmd.Flags().GetInt("keep-last")
+	prune, _ := cmd.Flags().GetBool("prune")
 	executionMode, _ = cmd.Flags().GetString("mode")
 	if executionMode == "default" {
 		if storage == "s3" {
 			utils.Info("Backup database to s3 storage")
-			s3Backup(disableCompression, s3Path)
+			s3Backup(disableCompression, s3Path, prune, keepLast)
 		} else {
 			utils.Info("Backup database to local storage")
-			BackupDatabase(disableCompression)
+			BackupDatabase(disableCompression, prune, keepLast)
 		}
 	} else if executionMode == "scheduled" {
@@ -72,7 +73,7 @@ func scheduledMode() {
 }
 // BackupDatabase backup database
-func BackupDatabase(disableCompression bool) {
+func BackupDatabase(disableCompression bool, prune bool, keepLast int) {
 	dbHost = os.Getenv("DB_HOST")
 	dbPassword := os.Getenv("DB_PASSWORD")
 	dbUserName := os.Getenv("DB_USERNAME")
@@ -140,6 +141,10 @@ func BackupDatabase(disableCompression bool) {
 		log.Fatal(err)
 	}
 	utils.Done("Database has been backed up")
+	//Delete old backup
+	if prune {
+		deleteOldBackup(keepLast)
+	}
 }
@@ -155,8 +160,39 @@ func BackupDatabase(disableCompression bool) {
 }
-func s3Backup(disableCompression bool, s3Path string) {
+func s3Backup(disableCompression bool, s3Path string, prune bool, keepLast int) {
 	// Backup Database to S3 storage
 	MountS3Storage(s3Path)
-	BackupDatabase(disableCompression)
+	BackupDatabase(disableCompression, prune, keepLast)
 }
+func deleteOldBackup(keepLast int) {
+	utils.Info("Deleting old backups...")
+	storagePath = os.Getenv("STORAGE_PATH")
+	// Define the directory path
+	backupDir := storagePath + "/"
+	// Get current time
+	currentTime := time.Now()
+	// Walk through files in the directory
+	err := filepath.Walk(backupDir, func(path string, info os.FileInfo, err error) error {
+		if err != nil {
+			return err
+		}
+		// Check if the file is older than defined day days
+		if info.Mode().IsRegular() && info.ModTime().Before(currentTime.AddDate(0, 0, -keepLast)) {
+			// Remove the file
+			err := os.Remove(path)
+			if err != nil {
+				utils.Fatal("Error removing file ", path, err)
+			} else {
+				utils.Done("Removed file: ", path)
+			}
+		}
+		return nil
+	})
+	if err != nil {
+		utils.Fatal("Error walking through directory: ", err)
+	}
+}

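For intuition, the deleteOldBackup walk above removes every regular file under STORAGE_PATH whose modification time is more than keepLast days old, recursing into subdirectories. A rough shell equivalent (assuming GNU find; an illustration, not part of the project):

```sh
# Approximately deleteOldBackup(7): delete regular files modified more than 7 days ago
find "${STORAGE_PATH:?}/" -type f -mtime +7 -exec rm -v {} \;
```

One design caveat visible in the Go code: the walk does not filter by file name, so any regular file in the storage path, not just dump files, is eligible for deletion.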
@@ -39,7 +39,7 @@ func RestoreDatabase(file string) {
 	dbPort = os.Getenv("DB_PORT")
 	storagePath = os.Getenv("STORAGE_PATH")
 	if file == "" {
-		utils.Fatal("Error required --file")
+		utils.Fatal("Error, file required")
 	}
 	if os.Getenv("DB_HOST") == "" || os.Getenv("DB_NAME") == "" || os.Getenv("DB_USERNAME") == "" || os.Getenv("DB_PASSWORD") == "" || file == "" {

@@ -26,11 +26,14 @@ func S3Mount() {
 func MountS3Storage(s3Path string) {
 	accessKey = os.Getenv("ACCESS_KEY")
 	secretKey = os.Getenv("SECRET_KEY")
-	bucketName = os.Getenv("BUCKETNAME")
+	bucketName = os.Getenv("BUCKET_NAME")
+	if bucketName == "" {
+		bucketName = os.Getenv("BUCKETNAME")
+	}
 	s3Endpoint = os.Getenv("S3_ENDPOINT")
 	if accessKey == "" || secretKey == "" || bucketName == "" {
-		utils.Fatal("Please make sure all environment variables are set")
+		utils.Fatal("Please make sure all environment variables are set for S3")
 	} else {
 		storagePath := fmt.Sprintf("%s%s", s3MountPath, s3Path)
 		err := os.Setenv("STORAGE_PATH", storagePath)

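The rename above keeps the old BUCKETNAME spelling as a fallback, so existing deployments continue to work while new ones use BUCKET_NAME. Either of these environment setups mounts the same bucket:

```sh
export BUCKET_NAME=your_bucket_name   # new, preferred variable
export BUCKETNAME=your_bucket_name    # legacy spelling, still honored via the fallback branch
```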
@@ -1,11 +1,5 @@
 package utils
-const Notice = "Please remove --operation flag.\n" +
-	"Use: \n" +
-	"- backup for database backup operation [eg: bkup backup -d database_name ...]\n" +
-	"- restore for database restore operation [eg. bkup restore -d database_name ...]\n" +
-	"Example: bkup backup --storage s3 ...( instead of < bkup --operation backup >)\n" +
-	"We are sorry for this inconvenient\n"
 const RestoreExample = "mysql-bkup restore --dbname database --file db_20231219_022941.sql.gz\n" +
 	"bkup restore --dbname database --storage s3 --path /custom-path --file db_20231219_022941.sql.gz"
 const BackupExample = "mysql-bkup backup --dbname database --disable-compression\n" +