diff --git a/README.md b/README.md
index b827a04..e21a638 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,8 @@ Postgres Backup tool, backup database to S3 or Object Storage
 - [Docker Hub](https://hub.docker.com/r/jkaninda/pg-bkup)
 - [Github](https://github.com/jkaninda/pg-bkup)
 
-> MySQL solution :
+## MySQL solution:
+
 - [MySQL](https://github.com/jkaninda/mysql-bkup)
 
 ## Storage:
@@ -118,7 +119,7 @@ docker run --rm --network your_network_name --name pg-bkup -v $PWD/backup:/backu
 version: '3'
 services:
   pg-bkup:
-    image: jkaninda/pg-bkup:latest
+    image: jkaninda/pg-bkup
     container_name: pg-bkup
     command:
       - /bin/sh
@@ -151,8 +152,8 @@ Simple S3 backup usage
 bkup --operation backup --storage s3 --dbname mydatabase
 ```
 ```yaml
-  mysql-bkup:
-    image: jkaninda/pg-bkup:latest
+  pg-bkup:
+    image: jkaninda/pg-bkup
     container_name: pg-bkup
     tty: true
    privileged: true
@@ -173,36 +174,93 @@ bkup --operation backup --storage s3 --dbname mydatabase
       - S3_ENDPOINT=${S3_ENDPOINT}
 ```
 
-## Run "docker run" from crontab
+## Run in scheduled mode
 
-Make an automated backup (every night at 1).
+This tool can run as a Kubernetes CronJob for regular backups, which makes deployment on Kubernetes easy since Kubernetes provides the CronJob resource.
+For Docker, run it in scheduled mode by adding the `--mode scheduled` flag and set the backup schedule with the `--period "0 1 * * *"` flag.
 
-> backup_script.sh
+The sections below show how to make an automated backup on Docker.
 
-```sh
-#!/bin/sh
-DB_USERNAME='db_username'
-DB_PASSWORD='password'
-DB_HOST='db_hostname'
-DB_PORT="5432"
-DB_NAME='db_name'
-BACKUP_DIR='/some/path/backup/'
+## Syntax of crontab (field description)
 
-docker run --rm --name pg-bkup -v $BACKUP_DIR:/backup/ -e "DB_HOST=$DB_HOST" -e "DB_PORT=$DB_PORT" -e "DB_USERNAME=$DB_USERNAME" -e "DB_PASSWORD=$DB_PASSWORD" jkaninda/pg-bkup bkup -o backup -d $DB_NAME
-```
+The syntax is:
 
-```sh
-chmod +x backup_script.sh
-```
+- 1: Minute (0-59)
+- 2: Hour (0-23)
+- 3: Day of month (1-31)
+- 4: Month (1-12 [12 == December])
+- 5: Day of the week (0-7 [0 or 7 == Sunday])
 
-Your crontab looks like this:
+An easy way to remember the format:
 
 ```conf
-0 1 * * * /path/to/backup_script.sh
+* * * * * command to be executed
+```
+
+```conf
+- - - - -
+| | | | |
+| | | | ----- Day of week (0 - 7) (Sunday=0 or 7)
+| | | ------- Month (1 - 12)
+| | --------- Day of month (1 - 31)
+| ----------- Hour (0 - 23)
+------------- Minute (0 - 59)
+```
+
+> At every 30th minute
+
+```conf
+*/30 * * * *
+```
+
+> At minute 0 of every hour
+
+```conf
+0 * * * *
+```
+
+> At 01:00 every day
+
+```conf
+0 1 * * *
+```
+
+## Example of scheduled mode
+
+> Docker run:
+
+```sh
+docker run --rm --name pg-bkup -v $BACKUP_DIR:/backup/ -e "DB_HOST=$DB_HOST" -e "DB_USERNAME=$DB_USERNAME" -e "DB_PASSWORD=$DB_PASSWORD" jkaninda/pg-bkup bkup --operation backup --dbname $DB_NAME --mode scheduled --period "0 1 * * *"
+```
+
+> With Docker Compose:
+
+```yaml
+version: "3"
+services:
+  pg-bkup:
+    image: jkaninda/pg-bkup
+    container_name: pg-bkup
+    privileged: true
+    devices:
+      - "/dev/fuse"
+    command:
+      - /bin/sh
+      - -c
+      - bkup --operation backup --storage s3 --path /my_s3_custom_path --dbname database_name --mode scheduled --period "*/30 * * * *"
+    environment:
+      - DB_PORT=5432
+      - DB_HOST=postgreshost
+      - DB_USERNAME=userName
+      - DB_PASSWORD=${DB_PASSWORD}
+      - ACCESS_KEY=${ACCESS_KEY}
+      - SECRET_KEY=${SECRET_KEY}
+      - BUCKETNAME=${BUCKETNAME}
+      - S3_ENDPOINT=${S3_ENDPOINT}
 ```
 
 ## Kubernetes CronJob
+For Kubernetes, you don't need to run it in scheduled mode; use a Kubernetes CronJob instead.
+
 Simple Kubernetes CronJob usage:
 
 ```yaml
diff --git a/build.sh b/build.sh
index 05f6bb7..a78cb4f 100755
--- a/build.sh
+++ b/build.sh
@@ -8,4 +8,4 @@ fi
 
 docker build -f src/docker/Dockerfile -t jkaninda/pg-bkup:$tag .
 
-docker-compose up -d
\ No newline at end of file
+docker compose up -d
\ No newline at end of file
diff --git a/src/docker/Dockerfile b/src/docker/Dockerfile
index e890463..bdc24b9 100644
--- a/src/docker/Dockerfile
+++ b/src/docker/Dockerfile
@@ -12,11 +12,11 @@ ENV ACCESS_KEY=""
 ENV SECRET_KEY=""
 ENV S3_ENDPOINT=https://s3.amazonaws.com
 ARG DEBIAN_FRONTEND=noninteractive
-ENV VERSION="0.1"
+ENV VERSION="0.2"
 
 RUN apt-get update -qq
 RUN apt-get install build-essential libcurl4-openssl-dev libxml2-dev mime-support -y
-RUN apt install s3fs postgresql-client postgresql-client-common libpq-dev -y
+RUN apt install s3fs postgresql-client postgresql-client-common libpq-dev supervisor cron -y
 
 # Clear cache
 RUN apt-get clean && rm -rf /var/lib/apt/lists/*
@@ -29,6 +29,8 @@ RUN chmod 777 /tmp/s3cache
 COPY src/pg_bkup.sh /usr/local/bin/
 RUN chmod +x /usr/local/bin/pg_bkup.sh
 
+ADD src/supervisord.conf /etc/supervisor/supervisord.conf
+
 RUN ln -s /usr/local/bin/pg_bkup.sh /usr/local/bin/pg_bkup
 RUN ln -s /usr/local/bin/pg_bkup.sh /usr/local/bin/bkup
diff --git a/src/pg_bkup.sh b/src/pg_bkup.sh
index c175453..3bc4e09 100755
--- a/src/pg_bkup.sh
+++ b/src/pg_bkup.sh
@@ -12,6 +12,9 @@ export TIMEOUT=60
 export PGPASSWORD=""
 export FILE_COMPRESION=true
 export CONNECTION=""
+export EXECUTION_MODE="default"
+export SCHEDULE_PERIOD="0 1 * * *"
+export FILE_COMPRESION=true
 usage_info()
 {
     echo "Usage: \\"
@@ -47,6 +50,8 @@ help()
     echo "     |--path     -- Set s3 path, without file name"
     echo "  -d |--dbname   -- Set database name "
     echo "  -p |--port     -- Set database port (default: 3306)"
+    echo "  -m |--mode     -- Set execution mode (default: default)"
+    echo "     |--period   -- Set schedule period time (default: '0 1 * * *')"
     echo "  -t |--timeout  -- Set timeout (default: 120s)"
     echo "  -h |--help     -- Print this help message and exit"
     echo "  -V |--version  -- Print version information and exit"
@@ -93,6 +98,16 @@ flags()
        [ $# = 0 ] && error "No database name specified"
        export DB_PORT="$1"
        shift;;
+    (-m|--mode)
+       shift
+       [ $# = 0 ] && error "No execution mode specified"
+       export EXECUTION_MODE="$1"
+       shift;;
+    (--period)
+       shift
+       [ $# = 0 ] && error "No schedule period entered"
+       export SCHEDULE_PERIOD="$1"
+       shift;;
     (-t|--timeout)
        shift
        [ $# = 0 ] && error "No timeout specified"
@@ -185,8 +200,59 @@ else
    export STORAGE_PATH=/s3mnt$S3_PATH
 fi
 }
+create_crontab_script()
+{
+TASK=/usr/local/bin/backup_cron.sh
+touch $TASK
+if [ $STORAGE == 's3' ]
+then
+cat > "$TASK" <<EOF
+#!/bin/sh
+bkup --operation backup --storage s3 --path $S3_PATH --dbname $DB_NAME
+EOF
+else
+cat > "$TASK" <<EOF
+#!/bin/sh
+bkup --operation backup --dbname $DB_NAME
+EOF
+fi
+chmod +x "$TASK"
+# cron does not inherit the container environment, so the cron job sources
+# /run/supervisord.env (written by supervisord) before running the backup script.
+CRON_JOB=/etc/cron.d/backup_cron
+cat > "$CRON_JOB" <<EOF
+$SCHEDULE_PERIOD root /bin/bash -c ". /run/supervisord.env; /usr/local/bin/backup_cron.sh >> /var/log/pg-bkup.log"
+EOF
+chmod 0644 /etc/cron.d/*
+crontab /etc/cron.d/backup_cron
+}
+scheduled_mode()
+{
+  if [ $OPERATION == 'backup' ]
+  then
+    create_crontab_script
+    echo ""
+    echo "**********************************"
+    echo "     Starting Postgres Bkup...    "
+    echo "**********************************"
+    echo "Running in scheduled mode"
+    echo "Execution period $SCHEDULE_PERIOD"
+    echo "Log file in /var/log/pg-bkup.log"
+    supervisord -c /etc/supervisor/supervisord.conf
+  else
+    echo "Scheduled mode supports only the backup operation"
+    exit 1
+  fi
+}
+
 flags "$@"
 # ?
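+# Execution-mode dispatch: 'default' runs the requested operation immediately,
+# while 'scheduled' writes /usr/local/bin/backup_cron.sh and /etc/cron.d/backup_cron,
+# then starts supervisord so cron runs the backup at SCHEDULE_PERIOD.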
+if [ $EXECUTION_MODE == 'default' ]
+then
 if [ $OPERATION != 'backup' ]
 then
     if [ $STORAGE != 's3' ]
@@ -206,4 +272,11 @@ flags "$@"
        echo "Backup to s3 storage"
        s3_backup
        fi
-    fi
\ No newline at end of file
+    fi
+elif [ $EXECUTION_MODE == 'scheduled' ]
+then
+    scheduled_mode
+else
+    echo "Error, unknown execution mode!"
+    exit 1
+fi
\ No newline at end of file
diff --git a/src/supervisord.conf b/src/supervisord.conf
new file mode 100644
index 0000000..84b35a1
--- /dev/null
+++ b/src/supervisord.conf
@@ -0,0 +1,13 @@
+[supervisord]
+nodaemon=true
+user=root
+logfile=/var/log/supervisor/supervisord.log
+pidfile=/var/run/supervisord.pid
+
+[program:cron]
+command = /bin/bash -c "declare -p | grep -Ev '^declare -[[:alpha:]]*r' > /run/supervisord.env && /usr/sbin/cron -f -L 15"
+autostart=true
+autorestart=true
+user = root
+stderr_logfile=/var/log/cron.err.log
+stdout_logfile=/var/log/cron.out.log
\ No newline at end of file
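A quick way to exercise the scheduled mode introduced in this diff; a minimal sketch, where the network, container, and database names are illustrative and it assumes the image has already been built (for example via `build.sh`) and a Postgres instance is reachable at `DB_HOST`:

```sh
# Start the backup container in scheduled mode (every 5 minutes), detached.
docker run -d --name pg-bkup-sched --network your_network_name \
  -v "$PWD/backup:/backup/" \
  -e "DB_HOST=postgreshost" -e "DB_PORT=5432" \
  -e "DB_USERNAME=userName" -e "DB_PASSWORD=password" \
  jkaninda/pg-bkup \
  bkup --operation backup --dbname mydatabase --mode scheduled --period "*/5 * * * *"

# Inspect the generated cron entry and follow the backup log.
docker exec pg-bkup-sched cat /etc/cron.d/backup_cron
docker exec pg-bkup-sched tail -f /var/log/pg-bkup.log
```

The `[program:cron]` entry in `supervisord.conf` dumps the container environment to `/run/supervisord.env` before starting cron, since cron jobs do not inherit Docker's environment variables on their own.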