Commit 66a9a2b2 authored by Elliott Shugerman

initial commit
MIT License
Copyright (c) 2017 Johannes Schickling
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
This is a fork and restructuring of schickling's [postgres-backup-s3](https://github.com/schickling/dockerfiles/tree/master/postgres-backup-s3) and [postgres-restore-s3](https://github.com/schickling/dockerfiles/tree/master/postgres-restore-s3).
See [`backup/README.md`](/backup/README.md) and [`restore/README.md`](/restore/README.md) for further instructions.
Fork goals:
- [x] dedicated repository
- [x] automated builds
- [x] support multiple PostgreSQL versions
- [ ] support encrypted (password-protected) backups
- [ ] merge backup and restore images?
# This file is generated from template.Dockerfile. Do not edit it directly.
###########################################################################
FROM alpine:3.8
ADD install.sh install.sh
RUN sh install.sh && rm install.sh
ENV POSTGRES_DATABASE **None**
ENV POSTGRES_HOST **None**
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER **None**
ENV POSTGRES_PASSWORD **None**
ENV POSTGRES_EXTRA_OPTS ''
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_PATH 'backup'
ENV S3_ENDPOINT **None**
ENV S3_S3V4 no
ENV SCHEDULE **None**
ADD run.sh run.sh
ADD backup.sh backup.sh
CMD ["sh", "run.sh"]
# This file is generated from template.Dockerfile. Do not edit it directly.
###########################################################################
FROM alpine:3.10
ADD install.sh install.sh
RUN sh install.sh && rm install.sh
ENV POSTGRES_DATABASE **None**
ENV POSTGRES_HOST **None**
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER **None**
ENV POSTGRES_PASSWORD **None**
ENV POSTGRES_EXTRA_OPTS ''
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_PATH 'backup'
ENV S3_ENDPOINT **None**
ENV S3_S3V4 no
ENV SCHEDULE **None**
ADD run.sh run.sh
ADD backup.sh backup.sh
CMD ["sh", "run.sh"]
# This file is generated from template.Dockerfile. Do not edit it directly.
###########################################################################
FROM alpine:edge
ADD install.sh install.sh
RUN sh install.sh && rm install.sh
ENV POSTGRES_DATABASE **None**
ENV POSTGRES_HOST **None**
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER **None**
ENV POSTGRES_PASSWORD **None**
ENV POSTGRES_EXTRA_OPTS ''
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_PATH 'backup'
ENV S3_ENDPOINT **None**
ENV S3_S3V4 no
ENV SCHEDULE **None**
ADD run.sh run.sh
ADD backup.sh backup.sh
CMD ["sh", "run.sh"]
# This file is generated from template.Dockerfile. Do not edit it directly.
###########################################################################
FROM alpine:3.6
ADD install.sh install.sh
RUN sh install.sh && rm install.sh
ENV POSTGRES_DATABASE **None**
ENV POSTGRES_HOST **None**
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER **None**
ENV POSTGRES_PASSWORD **None**
ENV POSTGRES_EXTRA_OPTS ''
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_PATH 'backup'
ENV S3_ENDPOINT **None**
ENV S3_S3V4 no
ENV SCHEDULE **None**
ADD run.sh run.sh
ADD backup.sh backup.sh
CMD ["sh", "run.sh"]
# postgres-backup-s3
Backup PostgreSQL to S3 (supports periodic backups)
## Usage
Docker:
```sh
$ docker run -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET=my-bucket -e S3_PREFIX=backup -e POSTGRES_DATABASE=dbname -e POSTGRES_USER=user -e POSTGRES_PASSWORD=password -e POSTGRES_HOST=localhost schickling/postgres-backup-s3
```
Docker Compose:
```yaml
postgres:
  image: postgres
  environment:
    POSTGRES_USER: user
    POSTGRES_PASSWORD: password

pgbackups3:
  image: schickling/postgres-backup-s3
  links:
    - postgres
  environment:
    SCHEDULE: '@daily'
    S3_REGION: region
    S3_ACCESS_KEY_ID: key
    S3_SECRET_ACCESS_KEY: secret
    S3_BUCKET: my-bucket
    S3_PREFIX: backup
    POSTGRES_DATABASE: dbname
    POSTGRES_USER: user
    POSTGRES_PASSWORD: password
    POSTGRES_EXTRA_OPTS: '--schema=public --blobs'
```
### Automatic Periodic Backups
You can additionally set the `SCHEDULE` environment variable like `-e SCHEDULE="@daily"` to run the backup automatically.
More information about the scheduling can be found [here](http://godoc.org/github.com/robfig/cron#hdr-Predefined_schedules).
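With `SCHEDULE` unset, the container performs a single backup and exits; with it set, go-cron keeps the container running and invokes `backup.sh` on the given schedule (see `run.sh`). As a sketch, the standalone command from above with a schedule attached (the `@every 8h` interval is just an illustration of the supported syntax):

```sh
$ docker run -e SCHEDULE="@every 8h" -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET=my-bucket -e S3_PREFIX=backup -e POSTGRES_DATABASE=dbname -e POSTGRES_USER=user -e POSTGRES_PASSWORD=password -e POSTGRES_HOST=localhost schickling/postgres-backup-s3
```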
#! /bin/sh
set -e
set -o pipefail
if [ "${S3_ACCESS_KEY_ID}" = "**None**" ]; then
  echo "You need to set the S3_ACCESS_KEY_ID environment variable."
  exit 1
fi

if [ "${S3_SECRET_ACCESS_KEY}" = "**None**" ]; then
  echo "You need to set the S3_SECRET_ACCESS_KEY environment variable."
  exit 1
fi

if [ "${S3_BUCKET}" = "**None**" ]; then
  echo "You need to set the S3_BUCKET environment variable."
  exit 1
fi

if [ "${POSTGRES_DATABASE}" = "**None**" ]; then
  echo "You need to set the POSTGRES_DATABASE environment variable."
  exit 1
fi

if [ "${POSTGRES_HOST}" = "**None**" ]; then
  if [ -n "${POSTGRES_PORT_5432_TCP_ADDR}" ]; then
    POSTGRES_HOST=$POSTGRES_PORT_5432_TCP_ADDR
    POSTGRES_PORT=$POSTGRES_PORT_5432_TCP_PORT
  else
    echo "You need to set the POSTGRES_HOST environment variable."
    exit 1
  fi
fi

if [ "${POSTGRES_USER}" = "**None**" ]; then
  echo "You need to set the POSTGRES_USER environment variable."
  exit 1
fi

if [ "${POSTGRES_PASSWORD}" = "**None**" ]; then
  echo "You need to set the POSTGRES_PASSWORD environment variable or link to a container named POSTGRES."
  exit 1
fi

if [ "${S3_ENDPOINT}" = "**None**" ]; then
  AWS_ARGS=""
else
  AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"
fi
# env vars needed for aws tools
export AWS_ACCESS_KEY_ID=$S3_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
export AWS_DEFAULT_REGION=$S3_REGION
export PGPASSWORD=$POSTGRES_PASSWORD
POSTGRES_HOST_OPTS="-h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER $POSTGRES_EXTRA_OPTS"
echo "Creating dump of ${POSTGRES_DATABASE} database from ${POSTGRES_HOST}..."
pg_dump $POSTGRES_HOST_OPTS $POSTGRES_DATABASE | gzip > dump.sql.gz
echo "Uploading dump to $S3_BUCKET"
cat dump.sql.gz | aws $AWS_ARGS s3 cp - s3://$S3_BUCKET/$S3_PREFIX/${POSTGRES_DATABASE}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz || exit 2
echo "SQL backup uploaded successfully"
#! /bin/sh
# exit if a command fails
set -e
apk update
# install pg_dump
apk add postgresql
# install s3 tools
apk add python py2-pip
pip install awscli
apk del py2-pip
# install go-cron
apk add curl
curl -L --insecure https://github.com/odise/go-cron/releases/download/v0.0.6/go-cron-linux.gz | zcat > /usr/local/bin/go-cron
chmod u+x /usr/local/bin/go-cron
apk del curl
# cleanup
rm -rf /var/cache/apk/*
#! /bin/sh
set -e
if [ "${S3_S3V4}" = "yes" ]; then
  aws configure set default.s3.signature_version s3v4
fi

if [ "${SCHEDULE}" = "**None**" ]; then
  sh backup.sh
else
  exec go-cron "$SCHEDULE" /bin/sh backup.sh
fi
FROM alpine:{alpine_version}
ADD install.sh install.sh
RUN sh install.sh && rm install.sh
ENV POSTGRES_DATABASE **None**
ENV POSTGRES_HOST **None**
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER **None**
ENV POSTGRES_PASSWORD **None**
ENV POSTGRES_EXTRA_OPTS ''
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_PATH 'backup'
ENV S3_ENDPOINT **None**
ENV S3_S3V4 no
ENV SCHEDULE **None**
ADD run.sh run.sh
ADD backup.sh backup.sh
CMD ["sh", "run.sh"]
#!/bin/python3
VERSIONS = (
    ('9', '3.6'),
    ('10', '3.8'),
    ('11', '3.10'),
    ('12', 'edge'),
)


def render(dir_, postgres_version, alpine_version):
    with open(f'{dir_}/template.Dockerfile') as f:
        template = f.read()

    rendered = template.format(alpine_version=alpine_version)

    with open(f'{dir_}/{postgres_version}.Dockerfile', 'w') as f:
        f.write('# This file is generated from template.Dockerfile. Do not edit it directly.\n')
        f.write('###########################################################################\n\n')
        f.write(rendered)


if __name__ == '__main__':
    for versions in VERSIONS:
        render('backup', *versions)
        render('restore', *versions)
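A usage sketch: the generator's filename isn't shown in this view, so `build.py` below is an assumption. Run it from the repository root to regenerate the versioned Dockerfiles in `backup/` and `restore/` from each directory's `template.Dockerfile`.

```sh
# hypothetical filename: adjust to wherever this script lives in the repository
$ python3 build.py
$ ls backup/*.Dockerfile
backup/10.Dockerfile  backup/11.Dockerfile  backup/12.Dockerfile  backup/9.Dockerfile
```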
# This file is generated from template.Dockerfile. Do not edit it directly.
###########################################################################
FROM alpine:3.8
ADD install.sh install.sh
RUN sh install.sh && rm install.sh
ENV POSTGRES_DATABASE **None**
ENV POSTGRES_HOST **None**
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER **None**
ENV POSTGRES_PASSWORD **None**
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_PATH 'backup'
ENV DROP_PUBLIC 'no'
ADD restore.sh restore.sh
CMD ["sh", "restore.sh"]
# This file is generated from template.Dockerfile. Do not edit it directly.
###########################################################################
FROM alpine:3.10
ADD install.sh install.sh
RUN sh install.sh && rm install.sh
ENV POSTGRES_DATABASE **None**
ENV POSTGRES_HOST **None**
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER **None**
ENV POSTGRES_PASSWORD **None**
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_PATH 'backup'
ENV DROP_PUBLIC 'no'
ADD restore.sh restore.sh
CMD ["sh", "restore.sh"]
# This file is generated from template.Dockerfile. Do not edit it directly.
###########################################################################
FROM alpine:edge
ADD install.sh install.sh
RUN sh install.sh && rm install.sh
ENV POSTGRES_DATABASE **None**
ENV POSTGRES_HOST **None**
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER **None**
ENV POSTGRES_PASSWORD **None**
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_PATH 'backup'
ENV DROP_PUBLIC 'no'
ADD restore.sh restore.sh
CMD ["sh", "restore.sh"]
# This file is generated from template.Dockerfile. Do not edit it directly.
###########################################################################
FROM alpine:3.6
ADD install.sh install.sh
RUN sh install.sh && rm install.sh
ENV POSTGRES_DATABASE **None**
ENV POSTGRES_HOST **None**
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER **None**
ENV POSTGRES_PASSWORD **None**
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_PATH 'backup'
ENV DROP_PUBLIC 'no'
ADD restore.sh restore.sh
CMD ["sh", "restore.sh"]
# postgres-restore-s3
Restore a SQL backup from S3 to PostgreSQL
## Warning
This can potentially put your database in a very bad state or completely destroy your data; be very careful.
## Limitations
This is made to restore a backup made by postgres-backup-s3; if your backup came from somewhere else, please check its format.
* Your S3 bucket *must* only contain backups which you wish to restore - it always grabs the 'latest' based on a plain Unix sort with no filtering (see the snippet below)
* They must be gzip-encoded plain-text SQL files
* If your bucket has more than 1000 files, the latest may not be restored - only one `s3 ls` command is made
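The 'latest' object is chosen with a single listing and a plain sort, essentially this pipeline from `restore.sh`:

```sh
LATEST_BACKUP=$(aws s3 ls s3://$S3_BUCKET/$S3_PREFIX/ | sort | tail -n 1 | awk '{ print $4 }')
```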
## Usage
Docker:
```sh
$ docker run -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET=my-bucket -e S3_PREFIX=backup -e POSTGRES_DATABASE=dbname -e POSTGRES_USER=user -e POSTGRES_PASSWORD=password -e POSTGRES_HOST=localhost schickling/postgres-restore-s3
```
## Dropping public
If you wish to drop the public schema (`drop schema public cascade; create schema public`), set the environment variable `DROP_PUBLIC=yes`. This is useful when you wish to restore into a database which already has data or schemas in it.
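For example, using the same placeholder values as the command above:

```sh
$ docker run -e DROP_PUBLIC=yes -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET=my-bucket -e S3_PREFIX=backup -e POSTGRES_DATABASE=dbname -e POSTGRES_USER=user -e POSTGRES_PASSWORD=password -e POSTGRES_HOST=localhost schickling/postgres-restore-s3
```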
#! /bin/sh
# exit if a command fails
set -e
apk update
# install the postgresql client (psql)
apk add postgresql
# install s3 tools
apk add python py-pip
pip install awscli
apk del py-pip
# cleanup
rm -rf /var/cache/apk/*
#! /bin/sh
set -e
set -o pipefail
if [ "${S3_ACCESS_KEY_ID}" = "**None**" ]; then
  echo "You need to set the S3_ACCESS_KEY_ID environment variable."
  exit 1
fi

if [ "${S3_SECRET_ACCESS_KEY}" = "**None**" ]; then
  echo "You need to set the S3_SECRET_ACCESS_KEY environment variable."
  exit 1
fi

if [ "${S3_BUCKET}" = "**None**" ]; then
  echo "You need to set the S3_BUCKET environment variable."
  exit 1
fi

if [ "${POSTGRES_DATABASE}" = "**None**" ]; then
  echo "You need to set the POSTGRES_DATABASE environment variable."
  exit 1
fi

if [ "${POSTGRES_HOST}" = "**None**" ]; then
  if [ -n "${POSTGRES_PORT_5432_TCP_ADDR}" ]; then
    POSTGRES_HOST=$POSTGRES_PORT_5432_TCP_ADDR
    POSTGRES_PORT=$POSTGRES_PORT_5432_TCP_PORT
  else
    echo "You need to set the POSTGRES_HOST environment variable."
    exit 1
  fi
fi

if [ "${POSTGRES_USER}" = "**None**" ]; then
  echo "You need to set the POSTGRES_USER environment variable."
  exit 1
fi

if [ "${POSTGRES_PASSWORD}" = "**None**" ]; then
  echo "You need to set the POSTGRES_PASSWORD environment variable or link to a container named POSTGRES."
  exit 1
fi
# env vars needed for aws tools
export AWS_ACCESS_KEY_ID=$S3_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
export AWS_DEFAULT_REGION=$S3_REGION
export PGPASSWORD=$POSTGRES_PASSWORD
POSTGRES_HOST_OPTS="-h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER"
echo "Finding latest backup"
LATEST_BACKUP=$(aws s3 ls s3://$S3_BUCKET/$S3_PREFIX/ | sort | tail -n 1 | awk '{ print $4 }')
echo "Fetching ${LATEST_BACKUP} from S3"
aws s3 cp s3://$S3_BUCKET/$S3_PREFIX/${LATEST_BACKUP} dump.sql.gz
gzip -d dump.sql.gz
if [ "${DROP_PUBLIC}" = "yes" ]; then
  echo "Recreating the public schema"
  psql $POSTGRES_HOST_OPTS -d $POSTGRES_DATABASE -c "drop schema public cascade; create schema public;"
fi
echo "Restoring ${LATEST_BACKUP}"
psql $POSTGRES_HOST_OPTS -d $POSTGRES_DATABASE < dump.sql
echo "Restore complete"
FROM alpine:{alpine_version}
ADD install.sh install.sh
RUN sh install.sh && rm install.sh
ENV POSTGRES_DATABASE **None**
ENV POSTGRES_HOST **None**
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER **None**
ENV POSTGRES_PASSWORD **None**
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_PATH 'backup'
ENV DROP_PUBLIC 'no'
ADD restore.sh restore.sh
CMD ["sh", "restore.sh"]