├── .editorconfig
├── .github
│   └── workflows
│       └── release.yml
├── Dockerfile
├── LICENSE
├── README.md
├── backup.sh
└── entrypoint.sh

/.editorconfig:
--------------------------------------------------------------------------------
root = true

[*]
end_of_line = LF
charset = utf-8
indent_size = 2
indent_style = space
max_line_length = 80

--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
name: Release
on:
  release:
    types: [published]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}

      - name: Generate image identifier
        id: name
        uses: ASzc/change-string-case-action@v5
        with:
          string: ${{ github.repository }}

      - name: Build
        uses: docker/build-push-action@v5
        with:
          push: true
          tags: |
            ${{ steps.name.outputs.lowercase }}:latest
            ${{ steps.name.outputs.lowercase }}:${{ github.event.release.name }}
          platforms: linux/amd64,linux/arm64

--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
FROM alpine:3.18.4
LABEL maintainer="Fedor Borshev "

RUN apk update && \
    apk --no-cache add dumb-init curl aws-cli supercronic && \
    apk --no-cache add mysql-client mariadb-connector-c

ENV MYSQLDUMP_OPTIONS --quick --no-create-db --add-drop-table --add-locks --allow-keywords --quote-names --disable-keys --single-transaction --create-options --comments --net_buffer_length=16384
ENV MYSQLDUMP_DATABASE **None**
ENV MYSQL_HOST **None**
ENV MYSQL_PORT 3306
ENV MYSQL_USER **None**
ENV MYSQL_PASSWORD **None**
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_ENDPOINT **None**
ENV S3_S3V4 no
ENV S3_PREFIX 'backup'
ENV S3_FILENAME **None**
ENV MULTI_DATABASES no
ENV SCHEDULE **None**
ENV SUCCESS_WEBHOOK **None**

ADD entrypoint.sh backup.sh /

HEALTHCHECK CMD curl --fail http://localhost:9746/health || exit 1

ENTRYPOINT ["/usr/bin/dumb-init", "--"]
CMD ["sh", "/entrypoint.sh"]

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2017 Johannes Schickling

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# mysql-backup-s3

[![Docker Image Version (latest by date)](https://img.shields.io/docker/v/f213/mysql-backup-s3?sort=semver)](https://hub.docker.com/r/f213/mysql-backup-s3) ![Docker Image Size (tag)](https://img.shields.io/docker/image-size/f213/mysql-backup-s3/latest) ![Docker Pulls](https://img.shields.io/docker/pulls/f213/mysql-backup-s3)

Periodically back up MySQL to S3.

## Usage

```sh
$ docker run -e SCHEDULE="0 0 * * *" -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET=my-bucket -e S3_PREFIX=backup -e MYSQL_USER=user -e MYSQL_PASSWORD=password -e MYSQL_HOST=localhost -e MYSQLDUMP_DATABASE=my-db f213/mysql-backup-s3
```

docker-compose:

```yaml
services:
  mysql_backup:
    image: f213/mysql-backup-s3
    environment:
      SCHEDULE: 30 13 * * * # every day at 13:30
      S3_PREFIX: mysql
      MYSQLDUMP_DATABASE: my-db
      MYSQL_HOST: localhost
      MYSQL_USER: user
      MYSQL_PASSWORD: password
      S3_ACCESS_KEY_ID: key
      S3_SECRET_ACCESS_KEY: secret
      S3_BUCKET: my-bucket
```

## Environment variables

- `SCHEDULE` crontab-like syntax to schedule your backups *required*
- `SUCCESS_WEBHOOK` URL to notify on success
- `MYSQLDUMP_OPTIONS` mysqldump options (default: `--quick --no-create-db --add-drop-table --add-locks --allow-keywords --quote-names --disable-keys --single-transaction --create-options --comments --net_buffer_length=16384`)
- `MYSQLDUMP_DATABASE` list of databases you want to back up, or `--all-databases` *required*
- `MYSQL_HOST` the MySQL host *required*
- `MYSQL_PORT` the MySQL port (default: 3306)
- `MYSQL_USER` the MySQL user *required*
- `MYSQL_PASSWORD` the MySQL password *required*
- `S3_ACCESS_KEY_ID` your AWS access key *required*
- `S3_SECRET_ACCESS_KEY` your AWS secret key *required*
- `S3_BUCKET` your AWS S3 bucket path *required*
- `S3_PREFIX` path prefix in your bucket (default: 'backup')
- `S3_FILENAME` a consistent filename to overwrite with your backup; if not set, a timestamp is used
- `S3_REGION` the AWS S3 bucket region (default: us-west-1)
- `S3_ENDPOINT` the AWS endpoint URL, for S3-compatible APIs such as [minio](https://minio.io) (default: none; see the example below)
- `S3_S3V4` set to `yes` to enable AWS Signature Version 4, required for [minio](https://minio.io) servers (default: no)
- `MULTI_DATABASES` set to `yes` to create one dump file per database (default: no)
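
For example, to send backups to an S3-compatible server such as [minio](https://minio.io), point `S3_ENDPOINT` at it and enable Signature Version 4. This is a minimal sketch, not a tested configuration: the endpoint URL, credentials, and bucket/database names below are placeholders you need to replace.

```sh
$ docker run \
    -e SCHEDULE="0 0 * * *" \
    -e S3_ENDPOINT=http://minio:9000 \
    -e S3_S3V4=yes \
    -e S3_ACCESS_KEY_ID=key \
    -e S3_SECRET_ACCESS_KEY=secret \
    -e S3_BUCKET=my-bucket \
    -e S3_PREFIX=backup \
    -e MYSQL_HOST=mysql \
    -e MYSQL_USER=user \
    -e MYSQL_PASSWORD=password \
    -e MYSQLDUMP_DATABASE=my-db \
    f213/mysql-backup-s3
```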
---

This project was originally forked from [schickling/dockerfiles](https://github.com/schickling/dockerfiles/tree/master/mysql-backup-s3).

--------------------------------------------------------------------------------
/backup.sh:
--------------------------------------------------------------------------------
#!/bin/sh

set -e
set -o pipefail

if [ "${S3_ACCESS_KEY_ID}" = "**None**" ]; then
  echo "Warning: You did not set the S3_ACCESS_KEY_ID environment variable."
fi

if [ "${S3_SECRET_ACCESS_KEY}" = "**None**" ]; then
  echo "Warning: You did not set the S3_SECRET_ACCESS_KEY environment variable."
fi

if [ "${S3_BUCKET}" = "**None**" ]; then
  echo "You need to set the S3_BUCKET environment variable."
  exit 1
fi

if [ "${MYSQLDUMP_DATABASE}" = "**None**" ]; then
  echo "You need to set the MYSQLDUMP_DATABASE environment variable (database name OR --all-databases)."
  exit 1
fi

if [ "${MYSQL_HOST}" = "**None**" ]; then
  echo "You need to set the MYSQL_HOST environment variable."
  exit 1
fi

if [ "${MYSQL_USER}" = "**None**" ]; then
  echo "You need to set the MYSQL_USER environment variable."
  exit 1
fi

if [ "${MYSQL_PASSWORD}" = "**None**" ]; then
  echo "You need to set the MYSQL_PASSWORD environment variable or link to a container named MYSQL."
  exit 1
fi

if [ "${S3_IAMROLE}" != "true" ]; then
  # env vars needed for aws tools - only if an IAM role is not used
  export AWS_ACCESS_KEY_ID=$S3_ACCESS_KEY_ID
  export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
  export AWS_DEFAULT_REGION=$S3_REGION
fi

MYSQL_HOST_OPTS="-h $MYSQL_HOST -P $MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD"
DUMP_START_TIME=$(date +"%Y-%m-%dT%H%M%SZ")

mysqldump --version

copy_s3 () {
  SRC_FILE=$1
  DEST_FILE=$2

  if [ "${S3_ENDPOINT}" = "**None**" ]; then
    AWS_ARGS=""
  else
    AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"
  fi

  echo "Uploading ${DEST_FILE} to S3..."

  cat $SRC_FILE | aws $AWS_ARGS s3 cp - s3://$S3_BUCKET/$S3_PREFIX/$DEST_FILE

  if [ $? != 0 ]; then
    >&2 echo "Error uploading ${DEST_FILE} to S3"
  fi

  rm $SRC_FILE
}
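
# Note: objects end up under s3://$S3_BUCKET/$S3_PREFIX/<file>, where <file> is
# "<dump start time>.<database>.sql.gz" by default, or
# "$S3_FILENAME.<database>.sql.gz" when S3_FILENAME is set - e.g. (hypothetical
# values) s3://my-bucket/backup/2024-01-01T000000Z.my-db.sql.gz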

# Multi databases: yes
if [ ! -z "$(echo $MULTI_DATABASES | grep -i -E "(yes|true|1)")" ]; then
  if [ "${MYSQLDUMP_DATABASE}" = "--all-databases" ]; then
    DATABASES=`mysql $MYSQL_HOST_OPTS -e "SHOW DATABASES;" | grep -Ev "(Database|information_schema|performance_schema|mysql|sys|innodb)"`
  else
    DATABASES=$MYSQLDUMP_DATABASE
  fi

  for DB in $DATABASES; do
    echo "Creating individual dump of ${DB} from ${MYSQL_HOST}..."

    DUMP_FILE="/tmp/${DB}.sql.gz"

    mysqldump $MYSQL_HOST_OPTS $MYSQLDUMP_OPTIONS $DB | gzip > $DUMP_FILE

    if [ $? = 0 ]; then
      if [ "${S3_FILENAME}" = "**None**" ]; then
        S3_FILE="${DUMP_START_TIME}.${DB}.sql.gz"
      else
        S3_FILE="${S3_FILENAME}.${DB}.sql.gz"
      fi

      copy_s3 $DUMP_FILE $S3_FILE
    else
      >&2 echo "Error creating dump of ${DB}"
    fi
  done
# Multi databases: no
else
  echo "Creating dump for ${MYSQLDUMP_DATABASE} from ${MYSQL_HOST}..."
  DB=$MYSQLDUMP_DATABASE

  DUMP_FILE="/tmp/${DB}.sql.gz"
  mysqldump $MYSQL_HOST_OPTS $MYSQLDUMP_OPTIONS $DB | gzip > $DUMP_FILE

  if [ $? = 0 ]; then
    if [ "${S3_FILENAME}" = "**None**" ]; then
      S3_FILE="${DUMP_START_TIME}.${DB}.sql.gz"
    else
      S3_FILE="${S3_FILENAME}.${DB}.sql.gz"
    fi

    copy_s3 $DUMP_FILE $S3_FILE
  else
    >&2 echo "Error creating dump of ${DB}"
  fi
fi

echo "SQL backup finished"

if [ "${SUCCESS_WEBHOOK}" != "**None**" ]; then
  echo "Notifying $SUCCESS_WEBHOOK"
  curl -m 10 --retry 5 $SUCCESS_WEBHOOK
fi

--------------------------------------------------------------------------------
/entrypoint.sh:
--------------------------------------------------------------------------------
#!/bin/sh

set -e

if [ "${S3_S3V4}" = "yes" ]; then
  aws configure set default.s3.signature_version s3v4
fi

if [ "${SCHEDULE}" = "**None**" ]; then
  echo "You need to set the SCHEDULE environment variable"
  exit 127
else
  echo "${SCHEDULE} /bin/sh /backup.sh" > /etc/crontab.backup
  exec supercronic -debug -prometheus-listen-address 0.0.0.0 /etc/crontab.backup
fi

--------------------------------------------------------------------------------