Add Docker Entrypoint

This commit is contained in:
2024-08-10 09:39:50 +02:00
parent aeac088e0c
commit 3a5b810d37
17 changed files with 273 additions and 86 deletions

View File

@@ -18,32 +18,32 @@ docker-build:
docker build -f docker/Dockerfile -t ${IMAGE_NAME}:latest . docker build -f docker/Dockerfile -t ${IMAGE_NAME}:latest .
docker-run: docker-build docker-run: docker-build
docker run --rm --network web --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} bkup backup --prune --keep-last 2 docker run --rm --network web --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} backup --prune --keep-last 2
docker-restore: docker-build docker-restore: docker-build
docker run --rm --network web --user 1000:1000 --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} bkup restore -f ${FILE_NAME} docker run --rm --network web --user 1000:1000 --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} restore -f ${FILE_NAME}
docker-run-scheduled: docker-build docker-run-scheduled: docker-build
docker run --rm --network web --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} bkup backup --mode scheduled --period "* * * * *" docker run --rm --network web --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} backup --mode scheduled --period "* * * * *"
docker-run-scheduled-s3: docker-build docker-run-scheduled-s3: docker-build
docker run --rm --network web --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} backup --storage s3 --mode scheduled --path /custom-path --period "* * * * *" docker run --rm --network web --name pg-bkup -v "./backup:/backup" -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${BUCKET_NAME}" -e "S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} backup --storage s3 --mode scheduled --path /custom-path --period "* * * * *"
docker-run-s3: docker-build docker-run-s3: docker-build
docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME}" -e "S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} bkup backup --storage s3 --mode scheduled --path custom-path --period "* * * * *" docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME}" -e "S3_ENDPOINT=${AWS_S3_ENDPOINT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} backup --storage s3 --mode scheduled --path custom-path --period "* * * * *"
docker-restore-s3: docker-build docker-restore-s3: docker-build
docker run --rm --network web --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${AWS_S3_BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "AWS_REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} bkup restore --storage s3 -f ${FILE_NAME} #--path /custom-path docker run --rm --network web --privileged --device /dev/fuse --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "ACCESS_KEY=${ACCESS_KEY}" -e "SECRET_KEY=${SECRET_KEY}" -e "BUCKET_NAME=${AWS_S3_BUCKET_NAME}" -e "S3_ENDPOINT=${S3_ENDPOINT}" -e "AWS_REGION=eu2" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} restore --storage s3 -f ${FILE_NAME} #--path /custom-path
docker-run-ssh: docker-build docker-run-ssh: docker-build
docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} bkup backup --storage ssh docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} backup --storage ssh
docker-run-scheduled-ssh: docker-build docker-run-scheduled-ssh: docker-build
docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} bkup backup --storage ssh --mode scheduled --period "* * * * *" docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" ${IMAGE_NAME} backup --storage ssh --mode scheduled --period "* * * * *"
docker-restore-ssh: docker-build docker-restore-ssh: docker-build
docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" ${IMAGE_NAME} bkup restore --storage ssh -f ${FILE_NAME} docker run --rm --network web --name pg-bkup -e "DB_HOST=${DB_HOST}" -e "DB_NAME=${DB_NAME}" -e "DB_USERNAME=${DB_USERNAME}" -e "DB_PASSWORD=${DB_PASSWORD}" -e "SSH_USER=${SSH_USER}" -e "SSH_HOST_NAME=${SSH_HOST_NAME}" -e "SSH_REMOTE_PATH=${SSH_REMOTE_PATH}" -e "SSH_PASSWORD=${SSH_PASSWORD}" -e "SSH_PORT=${SSH_PORT}" -e "GPG_PASSPHRASE=${GPG_PASSPHRASE}" -e "SSH_IDENTIFY_FILE=${SSH_IDENTIFY_FILE}" ${IMAGE_NAME} restore --storage ssh -f ${FILE_NAME}
run-docs: run-docs:
cd docs && bundle exec jekyll serve -H 0.0.0.0 -t cd docs && bundle exec jekyll serve -H 0.0.0.0 -t

View File

@@ -1,5 +1,5 @@
# PostgreSQL Backup # PostgreSQL Backup
pg-bkup is a Docker container image that can be used to backup and restore Postgres database. It supports local storage, AWS S3 or any S3 Alternatives for Object Storage, and SSH compatible storage. PostgreSQL Backup is a Docker container image that can be used to backup and restore Postgres database. It supports local storage, AWS S3 or any S3 Alternatives for Object Storage, and SSH compatible storage.
It also supports __encrypting__ your backups using GPG. It also supports __encrypting__ your backups using GPG.
The [jkaninda/pg-bkup](https://hub.docker.com/r/jkaninda/pg-bkup) Docker image can be deployed on Docker, Docker Swarm and Kubernetes. The [jkaninda/pg-bkup](https://hub.docker.com/r/jkaninda/pg-bkup) Docker image can be deployed on Docker, Docker Swarm and Kubernetes.
@@ -13,6 +13,7 @@ It also supports __encrypting__ your backups using GPG.
![Docker Pulls](https://img.shields.io/docker/pulls/jkaninda/pg-bkup?style=flat-square) ![Docker Pulls](https://img.shields.io/docker/pulls/jkaninda/pg-bkup?style=flat-square)
- Docker - Docker
- Docker Swarm
- Kubernetes - Kubernetes
## Documentation is found at <https://jkaninda.github.io/pg-bkup> ## Documentation is found at <https://jkaninda.github.io/pg-bkup>
@@ -44,7 +45,7 @@ To run a one time backup, bind your local volume to `/backup` in the container a
-e "DB_HOST=dbhost" \ -e "DB_HOST=dbhost" \
-e "DB_USERNAME=username" \ -e "DB_USERNAME=username" \
-e "DB_PASSWORD=password" \ -e "DB_PASSWORD=password" \
jkaninda/pg-bkup pg-bkup backup -d database_name jkaninda/pg-bkup backup -d database_name
``` ```
Alternatively, pass a `--env-file` in order to use a full config as described below. Alternatively, pass a `--env-file` in order to use a full config as described below.
@@ -61,10 +62,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: backup
- /bin/sh
- -c
- pg-bkup backup
volumes: volumes:
- ./backup:/backup - ./backup:/backup
environment: environment:
@@ -89,7 +87,7 @@ For Kubernetes, you don't need to run it in scheduled mode. You can deploy it as
apiVersion: batch/v1 apiVersion: batch/v1
kind: CronJob kind: CronJob
metadata: metadata:
name: bkup-job name: backup-job
spec: spec:
schedule: "0 1 * * *" schedule: "0 1 * * *"
jobTemplate: jobTemplate:
@@ -100,9 +98,11 @@ spec:
- name: pg-bkup - name: pg-bkup
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
command: command:
- /bin/sh - bkup
- -c - backup
- pg-bkup backup -s s3 --path /custom_path - --storage
- s3
- --disable-compression
env: env:
- name: DB_PORT - name: DB_PORT
value: "5432" value: "5432"

View File

@@ -35,7 +35,6 @@ func init() {
rootCmd.PersistentFlags().StringP("dbname", "d", "", "Database name") rootCmd.PersistentFlags().StringP("dbname", "d", "", "Database name")
rootCmd.PersistentFlags().IntP("port", "p", 5432, "Database port") rootCmd.PersistentFlags().IntP("port", "p", 5432, "Database port")
rootCmd.PersistentFlags().StringVarP(&operation, "operation", "o", "", "Set operation, for old version only") rootCmd.PersistentFlags().StringVarP(&operation, "operation", "o", "", "Set operation, for old version only")
rootCmd.AddCommand(VersionCmd) rootCmd.AddCommand(VersionCmd)
rootCmd.AddCommand(BackupCmd) rootCmd.AddCommand(BackupCmd)
rootCmd.AddCommand(RestoreCmd) rootCmd.AddCommand(RestoreCmd)

View File

@@ -65,4 +65,4 @@ RUN ln -s /usr/local/bin/pg-bkup /usr/local/bin/bkup
ADD docker/supervisord.conf /etc/supervisor/supervisord.conf ADD docker/supervisord.conf /etc/supervisor/supervisord.conf
WORKDIR $WORKDIR WORKDIR $WORKDIR
#ENTRYPOINT ["/usr/local/bin/pg-bkup"] ENTRYPOINT ["/usr/local/bin/pg-bkup"]

View File

@@ -22,10 +22,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: backup --storage s3 -d database --path /my-custom-path
- /bin/sh
- -c
- pg-bkup backup --storage s3 -d database --path /my-custom-path
environment: environment:
- DB_PORT=5432 - DB_PORT=5432
- DB_HOST=postgres - DB_HOST=postgres
@@ -62,10 +59,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: backup --storage s3 -d my-database --mode scheduled --period "0 1 * * *"
- /bin/sh
- -c
- pg-bkup backup --storage s3 -d my-database --mode scheduled --period "0 1 * * *"
environment: environment:
- DB_PORT=5432 - DB_PORT=5432
- DB_HOST=postgres - DB_HOST=postgres

View File

@@ -23,10 +23,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: backup --storage remote -d database
- /bin/sh
- -c
- pg-bkup backup --storage remote -d database
volumes: volumes:
- ./id_ed25519:/tmp/id_ed25519 - ./id_ed25519:/tmp/id_ed25519
environment: environment:

View File

@@ -7,7 +7,7 @@ nav_order: 1
# Backup database # Backup database
To backup the database, you need to add `backup` subcommand to `pg-bkup` or `bkup`. To backup the database, you need to add `backup` command.
{: .note } {: .note }
The default storage is local storage mounted to __/backup__. The backup is compressed by default using gzip. The flag __`disable-compression`__ can be used when you need to disable backup compression. The default storage is local storage mounted to __/backup__. The backup is compressed by default using gzip. The flag __`disable-compression`__ can be used when you need to disable backup compression.
@@ -15,7 +15,9 @@ The default storage is local storage mounted to __/backup__. The backup is compr
{: .warning } {: .warning }
Creating a user for backup tasks who has read-only access is recommended! Creating a user for backup tasks who has read-only access is recommended!
The backup process can be run in scheduled mode for the recurring backups. The backup process can be run in scheduled mode for the recurring backups on Docker or Docker Swarm.
On Kubernetes it can be run as CronJob, you don't need to run it in Scheduled mode.
It handles __recurring__ backups of postgres database on Docker and can be deployed as __CronJob on Kubernetes__ using local, AWS S3 or SSH compatible storage. It handles __recurring__ backups of postgres database on Docker and can be deployed as __CronJob on Kubernetes__ using local, AWS S3 or SSH compatible storage.
```yml ```yml
@@ -27,10 +29,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: backup -d database
- /bin/sh
- -c
- pg-bkup backup -d database
volumes: volumes:
- ./backup:/backup - ./backup:/backup
environment: environment:
@@ -54,7 +53,7 @@ networks:
-e "DB_HOST=dbhost" \ -e "DB_HOST=dbhost" \
-e "DB_USERNAME=username" \ -e "DB_USERNAME=username" \
-e "DB_PASSWORD=password" \ -e "DB_PASSWORD=password" \
jkaninda/pg-bkup pg-bkup backup -d database_name jkaninda/pg-bkup backup -d database_name
``` ```
In case you need to use recurring backups, you can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below. In case you need to use recurring backups, you can use `--mode scheduled` and specify the periodical backup time by adding `--period "0 1 * * *"` flag as described below.
@@ -68,10 +67,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: #command: backup -d database --mode scheduled --period "0 1 * * *"
- /bin/sh
- -c
- pg-bkup backup -d database --mode scheduled --period "0 1 * * *"
volumes: volumes:
- ./backup:/backup - ./backup:/backup
environment: environment:

View File

@@ -0,0 +1,228 @@
---
title: Deploy on Kubernetes
layout: default
parent: How Tos
nav_order: 8
---
## Deploy on Kubernetes
To deploy PostgreSQL Backup on Kubernetes, you can use a Job to backup or restore your database.
For recurring backups you can use a CronJob; you don't need to run it in scheduled mode, as described below.
## Backup Job
```yaml
apiVersion: batch/v1
kind: Job
metadata:
name: backup
spec:
ttlSecondsAfterFinished: 100
template:
spec:
containers:
- name: pg-bkup
image: jkaninda/pg-bkup
command:
- bkup
- backup
- --storage
- ssh
- --disable-compression
resources:
limits:
memory: "128Mi"
cpu: "500m"
env:
- name: DB_PORT
value: "5432"
- name: DB_HOST
value: ""
- name: DB_NAME
value: "dbname"
- name: DB_USERNAME
value: "postgres"
# Please use secret!
- name: DB_PASSWORD
value: ""
- name: SSH_HOST_NAME
value: "xxx"
- name: SSH_PORT
value: "22"
- name: SSH_USER
value: "xxx"
- name: SSH_PASSWORD
value: "xxxx"
- name: SSH_REMOTE_PATH
value: "/home/toto/backup"
# Optional, required if you want to encrypt your backup
- name: GPG_PASSPHRASE
value: "xxxx"
restartPolicy: Never
```
## Restore Job
```yaml
apiVersion: batch/v1
kind: Job
metadata:
name: restore-job
spec:
ttlSecondsAfterFinished: 100
template:
spec:
containers:
- name: pg-bkup
image: jkaninda/pg-bkup
command:
- bkup
- restore
- --storage
- ssh
- --file
- store_20231219_022941.sql.gz
resources:
limits:
memory: "128Mi"
cpu: "500m"
env:
- name: DB_PORT
value: "5432"
- name: DB_HOST
value: ""
- name: DB_NAME
value: "dbname"
- name: DB_USERNAME
value: "postgres"
# Please use secret!
- name: DB_PASSWORD
value: ""
- name: SSH_HOST_NAME
value: "xxx"
- name: SSH_PORT
value: "22"
- name: SSH_USER
value: "xxx"
- name: SSH_PASSWORD
value: "xxxx"
- name: SSH_REMOTE_PATH
value: "/home/toto/backup"
# Optional, required if your backup was encrypted
#- name: GPG_PASSPHRASE
# value: "xxxx"
restartPolicy: Never
```
## Recurring backup
```yaml
apiVersion: batch/v1
kind: CronJob
metadata:
name: backup-job
spec:
schedule: "* * * * *"
jobTemplate:
spec:
template:
spec:
containers:
- name: pg-bkup
image: jkaninda/pg-bkup
command:
- bkup
- backup
- --storage
- ssh
- --disable-compression
resources:
limits:
memory: "128Mi"
cpu: "500m"
env:
- name: DB_PORT
value: "5432"
- name: DB_HOST
value: ""
- name: DB_NAME
value: "test"
- name: DB_USERNAME
value: "postgres"
# Please use secret!
- name: DB_PASSWORD
value: ""
- name: SSH_HOST_NAME
value: "192.168.1.16"
- name: SSH_PORT
value: "2222"
- name: SSH_USER
value: "jkaninda"
- name: SSH_REMOTE_PATH
value: "/config/backup"
- name: SSH_PASSWORD
value: "password"
# Optional, required if you want to encrypt your backup
#- name: GPG_PASSPHRASE
# value: "xxx"
restartPolicy: Never
```
## Kubernetes Rootless
```yaml
apiVersion: batch/v1
kind: CronJob
metadata:
name: backup-job
spec:
schedule: "* * * * *"
jobTemplate:
spec:
template:
spec:
securityContext:
runAsUser: 1000
runAsGroup: 3000
fsGroup: 2000
containers:
- name: pg-bkup
image: jkaninda/pg-bkup
command:
- bkup
- backup
- --storage
- ssh
- --disable-compression
resources:
limits:
memory: "128Mi"
cpu: "500m"
env:
- name: DB_PORT
value: "5432"
- name: DB_HOST
value: ""
- name: DB_NAME
value: "test"
- name: DB_USERNAME
value: "postgres"
# Please use secret!
- name: DB_PASSWORD
value: ""
- name: SSH_HOST_NAME
value: "192.168.1.16"
- name: SSH_PORT
value: "2222"
- name: SSH_USER
value: "jkaninda"
- name: SSH_REMOTE_PATH
value: "/config/backup"
- name: SSH_PASSWORD
value: "password"
# Optional, required if you want to encrypt your backup
#- name: GPG_PASSPHRASE
# value: "xxx"
restartPolicy: OnFailure
```

View File

@@ -30,10 +30,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: backup -d database
- /bin/sh
- -c
- pg-bkup backup -d database
volumes: volumes:
- ./backup:/backup - ./backup:/backup
environment: environment:

View File

@@ -7,7 +7,7 @@ nav_order: 5
# Restore database from S3 storage # Restore database from S3 storage
To restore the database, you need to add `restore` subcommand to `pg-bkup` or `bkup` and specify the file to restore by adding `--file store_20231219_022941.sql.gz`. To restore the database, you need to add `restore` command and specify the file to restore by adding `--file store_20231219_022941.sql.gz`.
{: .note } {: .note }
It supports __.sql__ and __.sql.gz__ compressed file. It supports __.sql__ and __.sql.gz__ compressed file.
@@ -23,10 +23,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: restore --storage s3 -d my-database -f store_20231219_022941.sql.gz --path /my-custom-path
- /bin/sh
- -c
- bkup restore --storage s3 -d my-database -f store_20231219_022941.sql.gz --path /my-custom-path
volumes: volumes:
- ./backup:/backup - ./backup:/backup
environment: environment:

View File

@@ -6,7 +6,7 @@ nav_order: 6
--- ---
# Restore database from SSH remote server # Restore database from SSH remote server
To restore the database from your remote server, you need to add `restore` subcommand to `pg-bkup` or `bkup` and specify the file to restore by adding `--file store_20231219_022941.sql.gz`. To restore the database from your remote server, you need to add `restore` command and specify the file to restore by adding `--file store_20231219_022941.sql.gz`.
{: .note } {: .note }
It supports __.sql__ and __.sql.gz__ compressed file. It supports __.sql__ and __.sql.gz__ compressed file.
@@ -22,10 +22,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: restore --storage ssh -d my-database -f store_20231219_022941.sql.gz --path /home/jkaninda/backups
- /bin/sh
- -c
- pg-bkup restore --storage ssh -d my-database -f store_20231219_022941.sql.gz --path /home/jkaninda/backups
volumes: volumes:
- ./backup:/backup - ./backup:/backup
environment: environment:

View File

@@ -7,7 +7,7 @@ nav_order: 4
# Restore database # Restore database
To restore the database, you need to add `restore` subcommand to `pg-bkup` or `bkup` and specify the file to restore by adding `--file store_20231219_022941.sql.gz`. To restore the database, you need to add `restore` command and specify the file to restore by adding `--file store_20231219_022941.sql.gz`.
{: .note } {: .note }
It supports __.sql__ and __.sql.gz__ compressed file. It supports __.sql__ and __.sql.gz__ compressed file.
@@ -23,10 +23,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: restore -d database -f store_20231219_022941.sql.gz
- /bin/sh
- -c
- pg-bkup restore -d database -f store_20231219_022941.sql.gz
volumes: volumes:
- ./backup:/backup - ./backup:/backup
environment: environment:

View File

@@ -6,7 +6,7 @@ nav_order: 1
# About pg-bkup # About pg-bkup
{:.no_toc} {:.no_toc}
pg-bkup is a Docker container image that can be used to backup and restore Postgres database. It supports local storage, AWS S3 or any S3 Alternatives for Object Storage, and SSH compatible storage. PostgreSQL Backup is a Docker container image that can be used to backup and restore Postgres database. It supports local storage, AWS S3 or any S3 Alternatives for Object Storage, and SSH compatible storage.
It also supports __encrypting__ your backups using GPG. It also supports __encrypting__ your backups using GPG.
We are open to receiving stars, PRs, and issues! We are open to receiving stars, PRs, and issues!
@@ -40,7 +40,7 @@ To run a one time backup, bind your local volume to `/backup` in the container a
-e "DB_HOST=dbhost" \ -e "DB_HOST=dbhost" \
-e "DB_USERNAME=username" \ -e "DB_USERNAME=username" \
-e "DB_PASSWORD=password" \ -e "DB_PASSWORD=password" \
jkaninda/pg-bkup pg-bkup backup -d database_name jkaninda/pg-bkup backup -d database_name
``` ```
Alternatively, pass a `--env-file` in order to use a full config as described below. Alternatively, pass a `--env-file` in order to use a full config as described below.
@@ -56,10 +56,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: backup
- /bin/sh
- -c
- pg-bkup backup
volumes: volumes:
- ./backup:/backup - ./backup:/backup
environment: environment:
@@ -81,8 +78,8 @@ This Docker image is published to both Docker Hub and the GitHub container regis
Depending on your preferences and needs, you can reference both `jkaninda/pg-bkup` as well as `ghcr.io/jkaninda/pg-bkup`: Depending on your preferences and needs, you can reference both `jkaninda/pg-bkup` as well as `ghcr.io/jkaninda/pg-bkup`:
``` ```
docker pull jkaninda/pg-bkup:v1.2.0 docker pull jkaninda/pg-bkup
docker pull ghcr.io/jkaninda/pg-bkup:v1.2.0 docker pull ghcr.io/jkaninda/pg-bkup
``` ```
Documentation references Docker Hub, but all examples will work using ghcr.io just as well. Documentation references Docker Hub, but all examples will work using ghcr.io just as well.

View File

@@ -6,10 +6,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: backup --storage s3 -d my-database
- /bin/sh
- -c
- pg-bkup backup --storage s3 -d my-database"
environment: environment:
- DB_PORT=5432 - DB_PORT=5432
- DB_HOST=postgres - DB_HOST=postgres

View File

@@ -3,10 +3,7 @@ services:
pg-bkup: pg-bkup:
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: backup --dbname database_name --mode scheduled --period "0 1 * * *"
- /bin/sh
- -c
- pg-bkup backup --dbname database_name --mode scheduled --period "0 1 * * *"
volumes: volumes:
- ./backup:/backup - ./backup:/backup
environment: environment:

View File

@@ -6,10 +6,7 @@ services:
# for a list of available releases. # for a list of available releases.
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: backup --storage s3 -d my-database --mode scheduled --period "0 1 * * *"
- /bin/sh
- -c
- pg-bkup backup --storage s3 -d my-database --mode scheduled --period "0 1 * * *"
environment: environment:
- DB_PORT=5432 - DB_PORT=5432
- DB_HOST=postgres - DB_HOST=postgres

View File

@@ -3,10 +3,7 @@ services:
pg-bkup: pg-bkup:
image: jkaninda/pg-bkup image: jkaninda/pg-bkup
container_name: pg-bkup container_name: pg-bkup
command: command: backup --dbname database_name
- /bin/sh
- -c
- pg-bkup backup --dbname database_name
volumes: volumes:
- ./backup:/backup - ./backup:/backup
environment: environment: