From da68aae5b174a82c3bccfdcfce2337f0b5a2e027 Mon Sep 17 00:00:00 2001 From: Jonas Kaninda Date: Fri, 22 Dec 2023 07:13:49 +0100 Subject: [PATCH] Initial commit --- .DS_Store | Bin 0 -> 6148 bytes .github/workflows/build.yml | 39 ++++++ .gitignore | 5 + README.md | 232 ++++++++++++++++++++++++++++++++++++ backup_script.sh | 8 ++ build.sh | 11 ++ k8s-job.yaml | 31 +++++ src/docker/Dockerfile | 36 ++++++ src/pg_bkup.sh | 210 ++++++++++++++++++++++++++++++++ 9 files changed, 572 insertions(+) create mode 100644 .DS_Store create mode 100644 .github/workflows/build.yml create mode 100644 .gitignore create mode 100644 README.md create mode 100755 backup_script.sh create mode 100755 build.sh create mode 100644 k8s-job.yaml create mode 100644 src/docker/Dockerfile create mode 100755 src/pg_bkup.sh diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..8deb5f7d743f118984db5adebb8711341d305e8f GIT binary patch literal 6148 zcmeHKL2uJA6n<{Ynrs90Fr;0OB600XsSsMVODWxf1D6%S0Z>VrY7I-{s!3N()uf)` zKkyg0@<-smaDwkS9!kF{6M{;Qv-Y{qU(lBZ}xe|N9l6G4(IMl&K`D@H?e|5*pK- zyu5emwBWt{$2$NkB1tnE6*%7u91EN=Mu;&6`(Nb!=@~Yfdzl$+kw-2K=?wFkuKQAB zldrdkr+TOQ=1s#OPMXahqExQj*{W9U>Q422e=HaM#7*XL*PXrP)+;GP@9Nq0UI(Ma zptkp1Mu{6lqnRQE{s=Db-UN{^7hO4z{8(}O=?mMo>_KgRxoou#>rU%Q`?BsV+wErE zdD?1TUfS0EhmVd<24~?UlAo03B(Sq8yQ}dE`0M2IKkv_?P)7V6m{ZGYLj!0!grWgW zp=|@LaY3)I;AFqH1^yiVA^98qv-<8KzN87_OGF>hL80BC3w)~yh52#C1B{!Ev-_7k z&OghmV4f~=a|`i*7W{UxS`IP81Nw-WddThCd8zfBjc@7960BydWQq&pa2{FxY&xN1 ztk@pC1U8VlpJ$YK^=C&YV^%X7=Dm2$=i;JyL#9`jwR1!9OcObcsMHkzSV6ZkJgdL{K!-bk9gV9L(E}4H6_`?mzG4V{a}X&V?>pLmm10UKp+-g= zJ+ja@6rnF3!cp!dIw_jkC}0%GD^O0GH9r53H{bvBB-1hq7zJ*X0;1CE_qs?)pRH@j x@mcGjd_ZAizg3Egf=VC9>Vc2qeH3MQ=kNotqj8lYdSK>{fRe#fMuC5-z;EaP3Gn~` literal 0 HcmV?d00001 diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000..26def6a --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,39 @@ +name: Build +on: + push: + branches: [ "main" ] + workflow_dispatch: + inputs: + docker_tag: + description: 'Docker tag' + required: true + default: 'latest' + type: string +env: + BUILDKIT_IMAGE: jkaninda/pg-bkup +jobs: + docker: + runs-on: ubuntu-latest + steps: + - + name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - + name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - + name: Login to DockerHub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - + name: Build and push + uses: docker/build-push-action@v3 + with: + push: true + file: "./src/docker/Dockerfile" + platforms: linux/amd64,linux/arm64 + tags: | + "${{env.BUILDKIT_IMAGE}}:latest" + "${{env.BUILDKIT_IMAGE}}:v0.1" diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2a50d4c --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +/.history +backup +data +compose.yaml +.env \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..fa1b2b0 --- /dev/null +++ b/README.md @@ -0,0 +1,232 @@ +# Postgres Backup +Postgres Backup tool, backup database to S3 or Object Storage + +- Docker +- Kubernetes + +[![Build](https://github.com/jkaninda/pg-bkup/actions/workflows/build.yml/badge.svg)](https://github.com/jkaninda/pg-bkup/actions/workflows/build.yml) +![Docker Image Size (latest by date)](https://img.shields.io/docker/image-size/jkaninda/pg-bkup?style=flat-square) +![Docker Pulls](https://img.shields.io/docker/pulls/jkaninda/pg-bkup?style=flat-square) + +- [Docker 
Hub](https://hub.docker.com/r/jkaninda/pg-bkup)
+- [Github](https://github.com/jkaninda/pg-bkup)
+
+## Storage:
+- local
+- s3 (S3-compatible object storage)
+
+## Usage
+
+| Options     | Shorts | Usage                                              |
+|-------------|--------|----------------------------------------------------|
+| pg_bkup     | bkup   | CLI utility                                        |
+| --operation | -o     | Set operation: backup or restore (default: backup) |
+| --storage   | -s     | Set storage: local or s3 (default: local)          |
+| --file      | -f     | Set file name for restoration                      |
+| --path      |        | Set S3 path, without file name. e.g. /custom_path  |
+| --dbname    | -d     | Set database name                                  |
+| --port      | -p     | Set database port (default: 5432)                  |
+| --timeout   | -t     | Set timeout (default: 60s)                         |
+| --help      | -h     | Print this help message and exit                   |
+| --version   | -V     | Print version information and exit                 |
+
+## Backup database
+
+Simple backup usage:
+
+```sh
+bkup --operation backup
+```
+```sh
+bkup -o backup
+```
+### S3
+
+```sh
+bkup --operation backup --storage s3
+```
+## Docker run:
+
+```sh
+docker run --rm --network your_network_name --name pg-bkup -v $PWD/backup:/backup/ -e "DB_HOST=database_host_name" -e "DB_USERNAME=username" -e "DB_PASSWORD=password" jkaninda/pg-bkup bkup -o backup -d database_name
+```
+
+## Docker compose file:
+```yaml
+version: '3'
+services:
+  postgres:
+    image: postgres:14.5
+    container_name: postgres
+    pull_policy: if_not_present
+    restart: unless-stopped
+    volumes:
+      - ./postgres:/var/lib/postgresql/data
+    environment:
+      POSTGRES_DB: bkup
+      POSTGRES_PASSWORD: password
+      POSTGRES_USER: bkup
+  pg-bkup:
+    image: jkaninda/pg-bkup:latest
+    container_name: pg-bkup
+    command:
+      - /bin/sh
+      - -c
+      - bkup --operation backup -d bkup
+    volumes:
+      - ./backup:/backup
+    environment:
+      - DB_PORT=5432
+      - DB_HOST=postgres
+      - DB_NAME=bkup
+      - DB_USERNAME=bkup
+      - DB_PASSWORD=password
+```
+## Restore database
+
+Simple database restore operation usage:
+
+```sh
+bkup --operation restore --file database_20231217_115621.sql --dbname database_name
+```
+
+```sh
+bkup -o restore -f database_20231217_115621.sql -d database_name
+```
+### S3
+
+```sh
+bkup --operation restore --storage s3 --file database_20231217_115621.sql --dbname database_name
+```
+
+## Docker run:
+
+```sh
+docker run --rm --network your_network_name --name pg-bkup -v $PWD/backup:/backup/ -e "DB_HOST=database_host_name" -e "DB_USERNAME=username" -e "DB_PASSWORD=password" jkaninda/pg-bkup:latest bkup -o restore -d database_name -f napata_20231219_022941.sql.gz
+```
+
+## Docker compose file:
+
+```yaml
+version: '3'
+services:
+  pg-bkup:
+    image: jkaninda/pg-bkup:latest
+    container_name: pg-bkup
+    command:
+      - /bin/sh
+      - -c
+      - bkup --operation restore --file database_20231217_115621.sql -d database_name
+    volumes:
+      - ./backup:/backup
+    environment:
+      #- FILE_NAME=database_20231217_040238.sql # Optional if file name is set from command
+      - DB_PORT=5432
+      - DB_HOST=postgres
+      - DB_USERNAME=user_name
+      - DB_PASSWORD=password
+```
+## Run
+
+```sh
+docker-compose up -d
+```
+## Backup to S3
+
+```sh
+docker run --rm --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=db_hostname" -e "DB_USERNAME=username" -e "DB_PASSWORD=password" -e "ACCESS_KEY=your_access_key" -e "SECRET_KEY=your_secret_key" -e "BUCKETNAME=your_bucket_name" -e "S3_ENDPOINT=https://eu2.contabostorage.com" jkaninda/pg-bkup:latest bkup -o backup -s s3 -d database_name
+```
+> To change the S3 backup path, add the `--path` flag, e.g. `--path /custom_path`. The default path is `/mysql-bkup`.
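+
+For example, a backup to a custom S3 path could look like this (values are illustrative):
+
+```sh
+bkup --operation backup --storage s3 --dbname mydatabase --path /custom_path
+```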
+
+Simple S3 backup usage:
+
+```sh
+bkup --operation backup --storage s3 --dbname mydatabase
+```
+Or the equivalent docker compose service:
+```yaml
+  pg-bkup:
+    image: jkaninda/pg-bkup:latest
+    container_name: pg-bkup
+    tty: true
+    privileged: true
+    devices:
+      - "/dev/fuse"
+    command:
+      - /bin/sh
+      - -c
+      - bkup --operation backup --storage s3 --dbname database_name
+    environment:
+      - DB_PORT=5432
+      - DB_HOST=postgres
+      - DB_USERNAME=user_name
+      - DB_PASSWORD=password
+      - ACCESS_KEY=${ACCESS_KEY}
+      - SECRET_KEY=${SECRET_KEY}
+      - BUCKETNAME=${BUCKETNAME}
+      - S3_ENDPOINT=${S3_ENDPOINT}
+```
+## Run "docker run" from crontab
+
+Make an automated backup (every night at 1 AM).
+
+> backup_script.sh
+
+```sh
+#!/bin/sh
+DB_USERNAME='db_username'
+DB_PASSWORD='password'
+DB_HOST='db_hostname'
+DB_NAME='db_name'
+BACKUP_DIR='/some/path/backup/'
+
+docker run --rm --name pg-bkup -v "$BACKUP_DIR":/backup/ -e "DB_HOST=$DB_HOST" -e "DB_USERNAME=$DB_USERNAME" -e "DB_PASSWORD=$DB_PASSWORD" jkaninda/pg-bkup bkup -o backup -d "$DB_NAME"
+```
+
+```sh
+chmod +x backup_script.sh
+```
+
+Your crontab looks like this:
+
+```conf
+0 1 * * * /path/to/backup_script.sh
+```
+
+## Kubernetes CronJob
+
+Simple Kubernetes CronJob usage:
+
+```yaml
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+  name: pg-bkup-job
+spec:
+  schedule: "0 0 * * *"
+  jobTemplate:
+    spec:
+      backoffLimit: 4
+      template:
+        spec:
+          containers:
+            - name: pg-bkup
+              image: jkaninda/pg-bkup
+              command:
+                - /bin/sh
+                - -c
+                - bkup --operation backup
+              env:
+                - name: DB_PORT
+                  value: "5432"
+                - name: DB_HOST
+                  value: "postgres-svc"
+                - name: DB_NAME
+                  value: "database_name"
+                - name: DB_USERNAME
+                  value: "db_username"
+                # Please use a secret instead!
+                - name: DB_PASSWORD
+                  value: "password"
+          restartPolicy: Never
+```
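+
+As noted in the example above, the database password should come from a Kubernetes Secret rather than a plain value. A minimal sketch (names are illustrative):
+
+```yaml
+apiVersion: v1
+kind: Secret
+metadata:
+  name: pg-bkup-secrets
+type: Opaque
+stringData:
+  DB_PASSWORD: password
+```
+
+Then reference it from the CronJob container with `secretKeyRef`:
+
+```yaml
+          containers:
+            - name: pg-bkup
+              image: jkaninda/pg-bkup
+              env:
+                - name: DB_PASSWORD
+                  valueFrom:
+                    secretKeyRef:
+                      name: pg-bkup-secrets
+                      key: DB_PASSWORD
+```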
diff --git a/backup_script.sh b/backup_script.sh
new file mode 100755
index 0000000..c10fa5f
--- /dev/null
+++ b/backup_script.sh
@@ -0,0 +1,8 @@
+#!/bin/sh
+DB_USERNAME='db_username'
+DB_PASSWORD='password'
+DB_HOST='db_hostname'
+DB_NAME='db_name'
+BACKUP_DIR="$PWD/backup"
+
+docker run --rm --name pg-bkup -v "$BACKUP_DIR":/backup/ -e "DB_HOST=$DB_HOST" -e "DB_USERNAME=$DB_USERNAME" -e "DB_PASSWORD=$DB_PASSWORD" jkaninda/pg-bkup bkup -o backup -d "$DB_NAME"
\ No newline at end of file
diff --git a/build.sh b/build.sh
new file mode 100755
index 0000000..05f6bb7
--- /dev/null
+++ b/build.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+if [ $# -eq 0 ]
+then
+  tag='latest'
+else
+  tag="$1"
+fi
+
+docker build -f src/docker/Dockerfile -t "jkaninda/pg-bkup:$tag" .
+
+docker-compose up -d
\ No newline at end of file
diff --git a/k8s-job.yaml b/k8s-job.yaml
new file mode 100644
index 0000000..0704837
--- /dev/null
+++ b/k8s-job.yaml
@@ -0,0 +1,31 @@
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+  name: pg-bkup-job
+spec:
+  schedule: "0 0 * * *"
+  jobTemplate:
+    spec:
+      backoffLimit: 4
+      template:
+        spec:
+          containers:
+            - name: pg-bkup
+              image: jkaninda/pg-bkup
+              command:
+                - /bin/sh
+                - -c
+                - bkup --operation backup
+              env:
+                - name: DB_PORT
+                  value: "5432"
+                - name: DB_HOST
+                  value: "postgres-svc"
+                - name: DB_NAME
+                  value: "database_name"
+                - name: DB_USERNAME
+                  value: "user_name"
+                # Please use a secret!
+                - name: DB_PASSWORD
+                  value: "password"
+          restartPolicy: Never
\ No newline at end of file
diff --git a/src/docker/Dockerfile b/src/docker/Dockerfile
new file mode 100644
index 0000000..e890463
--- /dev/null
+++ b/src/docker/Dockerfile
@@ -0,0 +1,36 @@
+FROM ubuntu:24.04
+ENV DB_HOST=""
+ENV DB_NAME=""
+ENV DB_USERNAME=""
+ENV DB_PASSWORD=""
+ENV DB_PORT="5432"
+ENV DESTINATION=local
+ENV STORAGE=local
+ENV SOURCE=local
+ENV BUCKETNAME=""
+ENV ACCESS_KEY=""
+ENV SECRET_KEY=""
+ENV S3_ENDPOINT=https://s3.amazonaws.com
+ARG DEBIAN_FRONTEND=noninteractive
+ENV VERSION="0.1"
+
+RUN apt-get update -qq
+RUN apt-get install -y build-essential libcurl4-openssl-dev libxml2-dev mime-support
+RUN apt-get install -y s3fs postgresql-client postgresql-client-common libpq-dev
+
+# Clear the apt cache to keep the image small
+RUN apt-get clean && rm -rf /var/lib/apt/lists/*
+
+RUN mkdir /s3mnt
+RUN mkdir /tmp/s3cache
+RUN chmod 777 /s3mnt
+RUN chmod 777 /tmp/s3cache
+
+COPY src/pg_bkup.sh /usr/local/bin/
+RUN chmod +x /usr/local/bin/pg_bkup.sh
+
+RUN ln -s /usr/local/bin/pg_bkup.sh /usr/local/bin/pg_bkup
+RUN ln -s /usr/local/bin/pg_bkup.sh /usr/local/bin/bkup
+
+RUN mkdir /backup
+WORKDIR /backup
\ No newline at end of file
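
The image defines no ENTRYPOINT, so the symlinked helpers can be invoked directly as the container command. A quick smoke test after building could look like this (tag is illustrative):

```sh
docker build -f src/docker/Dockerfile -t jkaninda/pg-bkup:latest .
docker run --rm jkaninda/pg-bkup:latest bkup --help
```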
diff --git a/src/pg_bkup.sh b/src/pg_bkup.sh
new file mode 100755
index 0000000..69e0147
--- /dev/null
+++ b/src/pg_bkup.sh
@@ -0,0 +1,210 @@
+#!/usr/bin/env bash
+
+set -e
+TIME=$(date +%Y%m%d_%H%M%S)
+# PostgreSQL client tools (pg_dump, psql) are expected on the PATH
+arg0=$(basename "$0" .sh)
+blnk=$(echo "$arg0" | sed 's/./ /g')
+export OPERATION=backup
+export STORAGE=local
+export STORAGE_PATH=/backup
+export S3_PATH=/mysql-bkup
+export TIMEOUT=60
+export PGPASSWORD=""
+export FILE_COMPRESSION=true
+export CONNECTION=""
+usage_info()
+{
+    echo "Usage: \\"
+    echo "   $blnk Backup:  pg_bkup -o backup -s s3 \\"
+    echo "   $blnk Restore: pg_bkup -o restore -s s3 -f my_db.sql \\"
+    echo "   $blnk [-o|--operation] [{-f|--file} ] [{-s|--storage} ] [{-h|--help} ]"
+
+}
+version_info()
+{
+    echo "Version: $VERSION"
+    exit 0
+}
+usage()
+{
+    exec 1>&2 # Send standard output to standard error
+    usage_info
+    exit 1
+}
+
+error()
+{
+    echo "$arg0: $*" >&2
+    exit 1
+}
+
+help()
+{
+    echo
+    echo "  -o |--operation  -- Set operation (default: backup)"
+    echo "  -s |--storage    -- Set storage (default: local)"
+    echo "  -f |--file       -- Set file name"
+    echo "     |--path       -- Set s3 path, without file name"
+    echo "  -d |--dbname     -- Set database name"
+    echo "  -p |--port       -- Set database port (default: 5432)"
+    echo "  -t |--timeout    -- Set timeout (default: 60s)"
+    echo "  -h |--help       -- Print this help message and exit"
+    echo "  -V |--version    -- Print version information and exit"
+    exit 0
+}
+
+flags()
+{
+    while test $# -gt 0
+    do
+        case "$1" in
+        (-o|--operation)
+            shift
+            [ $# = 0 ] && error "No operation specified - restore or backup"
+            export OPERATION="$1"
+            shift;;
+        (-d|--dbname)
+            shift
+            [ $# = 0 ] && error "No database name specified"
+            export DB_NAME="$1"
+            shift;;
+        (-s|--storage)
+            shift
+            [ $# = 0 ] && error "No storage specified - local or s3 | default local"
+            export STORAGE="$1"
+            shift;;
+        (-f|--file)
+            shift
+            [ $# = 0 ] && error "No file specified - file to restore"
+            export FILE_NAME="$1"
+            shift;;
+        (--path)
+            shift
+            [ $# = 0 ] && error "No s3 path specified - s3 path without file name"
+            export S3_PATH="$1"
+            shift;;
+        (-db|--database)
+            shift
+            [ $# = 0 ] && error "No database name specified"
+            export DB_NAME="$1"
+            shift;;
+        (-p|--port)
+            shift
+            [ $# = 0 ] && error "No database port specified"
+            export DB_PORT="$1"
+            shift;;
+        (-t|--timeout)
+            shift
+            [ $# = 0 ] && error "No timeout specified"
+            export TIMEOUT="$1"
+            shift;;
+        (-h|--help)
+            help;;
+        (-V|--version)
+            version_info;;
+        (--)
+            help;;
+        (*) usage;;
+        esac
+    done
+}
+create_pgpass(){
+  # .pgpass format: hostname:port:database:username:password
+  export CONNECTION=${DB_HOST}:${DB_PORT}:${DB_NAME}:${DB_USERNAME}:${DB_PASSWORD}
+  echo "$CONNECTION" > ~/.pgpass
+  chmod 600 ~/.pgpass
+}
+backup()
+{
+ if [ -z "${DB_HOST}" ] || [ -z "${DB_NAME}" ] || [ -z "${DB_USERNAME}" ] || [ -z "${DB_PASSWORD}" ]; then
+  echo "Please make sure all required options are set"
+else
+  export PGPASSWORD=${DB_PASSWORD}
+  ## Test database connection
+
+  ## Backup database
+  pg_dump -h "${DB_HOST}" -p "${DB_PORT}" -U "${DB_USERNAME}" -d "${DB_NAME}" -v | gzip > "${STORAGE_PATH}/${DB_NAME}_${TIME}.sql.gz"
+  echo "Database has been saved"
+fi
+exit 0
+}
+
+restore()
+{
+if [ -z "${DB_HOST}" ] || [ -z "${DB_NAME}" ] || [ -z "${DB_USERNAME}" ] || [ -z "${DB_PASSWORD}" ]; then
+  echo "Please make sure all required options are set"
+else
+ ## Restore database
+  export PGPASSWORD=${DB_PASSWORD}
+ if [ -f "${STORAGE_PATH}/${FILE_NAME}" ]; then
+    # Compressed dumps are streamed through zcat, plain SQL dumps through cat
+    if gzip -t "${STORAGE_PATH}/${FILE_NAME}" 2>/dev/null; then
+       zcat "${STORAGE_PATH}/${FILE_NAME}" | psql -h "${DB_HOST}" -p "${DB_PORT}" -U "${DB_USERNAME}" -d "${DB_NAME}"
+    else
+       cat "${STORAGE_PATH}/${FILE_NAME}" | psql -h "${DB_HOST}" -p "${DB_PORT}" -U "${DB_USERNAME}" -d "${DB_NAME}"
+    fi
+  echo "Database has been restored"
+ else
+  echo "Error, file not found in ${STORAGE_PATH}/${FILE_NAME}"
+ fi
+fi
+exit 0
+}
+
+s3_backup()
+{
+  mount_s3
+  backup
+}
+
+s3_restore()
+{
+  mount_s3
+  restore
+
+}
+
+mount_s3()
+{
+if [ -z "${ACCESS_KEY}" ] || [ -z "${SECRET_KEY}" ]; then
+echo "Please make sure all environment variables are set"
+echo -e "BUCKETNAME=$BUCKETNAME \nACCESS_KEY=$ACCESS_KEY \nSECRET_KEY=$SECRET_KEY"
+else
+  echo "$ACCESS_KEY:$SECRET_KEY" > /etc/passwd-s3fs
+  chmod 600 /etc/passwd-s3fs
+  echo "Mounting Object storage in /s3mnt ...."
+   if [ -z "$(ls -A /s3mnt)" ]; then
+     s3fs "$BUCKETNAME" /s3mnt -o passwd_file=/etc/passwd-s3fs -o use_cache=/tmp/s3cache -o allow_other -o url="$S3_ENDPOINT" -o use_path_request_style
+     if [ ! -d "/s3mnt$S3_PATH" ]; then
+       mkdir -p "/s3mnt$S3_PATH"
+     fi
+   else
+     echo "Object storage already mounted in /s3mnt"
+   fi
+export STORAGE_PATH=/s3mnt$S3_PATH
+fi
+}
+flags "$@"
+# Dispatch on operation and storage
+  if [ "$OPERATION" != 'backup' ]
+  then
+     if [ "$STORAGE" != 's3' ]
+     then
+        echo "Restore from local"
+        restore
+     else
+       echo "Restore from s3"
+       s3_restore
+     fi
+  else
+     if [ "$STORAGE" != 's3' ]
+     then
+        echo "Backup to local destination"
+        backup
+     else
+       echo "Backup to s3 storage"
+       s3_backup
+     fi
+  fi
\ No newline at end of file
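
For reference, the script above ultimately runs plain `pg_dump` and `psql` pipelines, so a backup it produces can also be inspected or restored by hand. Roughly equivalent manual commands (host, credentials, and file names are illustrative):

```sh
# What `bkup -o backup -d mydb` effectively runs:
PGPASSWORD=password pg_dump -h db_host -p 5432 -U db_user -d mydb | gzip > /backup/mydb_20231222_071349.sql.gz

# What `bkup -o restore -f mydb_20231222_071349.sql.gz -d mydb` effectively runs:
zcat /backup/mydb_20231222_071349.sql.gz | PGPASSWORD=password psql -h db_host -p 5432 -U db_user -d mydb
```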