diff --git a/.gherkin-lintrc b/.gherkin-lintrc new file mode 100644 index 0000000..9b12b0b --- /dev/null +++ b/.gherkin-lintrc @@ -0,0 +1,25 @@ +{ + "no-unnamed-features": "on", + "no-unnamed-scenarios": "on", + "no-unused-variables": "on", + "no-dupe-feature-names": "on", + "no-dupe-scenario-names": "on", + "no-empty-background": "on", + "no-empty-file": "on", + "no-examples-in-scenarios": "on", + "no-scenario-outlines-without-examples": "on", + "no-multiple-empty-lines": "on", + "use-and": "on", + "indentation": [ + "on", {} + ], + "no-trailing-spaces": "on", + "new-line-at-eof": ["on", "yes"], + "one-space-between-tags": "on", + + "no-files-without-scenarios": "off", + "allowed-tags": [ + "on", {"tags": ["@caho還沒做", "@maherekayto做好了"]} + ] +} + diff --git a/.shellcheckrc b/.shellcheckrc new file mode 100644 index 0000000..0182ace --- /dev/null +++ b/.shellcheckrc @@ -0,0 +1 @@ +disable=SC1091 diff --git a/.travis.yml b/.travis.yml index 3092055..42672a3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,96 +15,126 @@ branches: only: - main before_install: - - curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" - - unzip -q awscliv2.zip - - sudo ./aws/install - - pip install awscli-local + - pip install --upgrade pip setuptools jobs: include: - name: Check YAML format + install: + - pip install tox + script: + - tox -e yamllint + - name: Check Markdown Format + install: + - pip install tox + script: + - tox -e pymarkdown + - name: Check Gherkin format + language: node_js + node_js: 22 before_install: skip install: - - pip install --upgrade pip setuptools - - pip install yamllint + - npm install gherkin-lint script: - - yamllint . 
+ - npx gherkin-lint + - name: Check Python Format + install: + - pip install tox + script: + - tox -e flake8 + - name: Check Bash format + install: + - pip install tox + script: + - tox -e shellcheck - name: backup once + before_install: &before_install_aws + - pip install --upgrade pip setuptools + - pip install -r requirements_travisci.txt + - DOCKER_CONFIG=${DOCKER_CONFIG:-$HOME/.docker} + - mkdir -p $DOCKER_CONFIG/cli-plugins + - curl -SL https://github.com/docker/compose/releases/download/v2.38.1/docker-compose-linux-x86_64 -o $DOCKER_CONFIG/cli-plugins/docker-compose + - chmod +x $DOCKER_CONFIG/cli-plugins/docker-compose install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" script: - - docker-compose --file tests/docker-compose-backup.yml up --detach + - docker compose --file tests/docker-compose-backup.yml up --detach --quiet-pull - DATE=`date "+%Y-%m-%d"` - TIME=`date "+%Y%m%dT%H%M"` - sleep 10 - - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "postgres15_postgres_1/${DATE}/postgres15_postgres_1_${TIME}.sql.gz" postgres15.sql.gz + - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "postgres15-postgres-1/${DATE}/postgres15-postgres-1_${TIME}.sql.gz" postgres15.sql.gz - awslocal s3api list-objects --bucket "${BUCKET_NAME}" - name: starting message + before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose 
--file tests/docker-compose-backup.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" script: - - docker-compose --file tests/docker-compose-backup.yml run --rm backup | tee backup.log + - docker compose --file tests/docker-compose-backup.yml run --rm backup 2>&1 | tee backup.log - cat backup.log | grep 'multiple-databases-backup is starting.' - cat backup.log | grep 'multiple-databases-backup is finished, exiting.' - cat backup.log - name: crontab + before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-minute.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-minute.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" script: - - docker-compose --file tests/docker-compose-backup-minute.yml up --detach + - docker compose --file tests/docker-compose-backup-minute.yml up --detach --quiet-pull - sleep 1m - DATE=`date "+%Y-%m-%d"` - TIME=`date "+%Y%m%dT%H%M"` - - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "postgres15_postgres_1/${DATE}/postgres15_postgres_1_${TIME}.sql.gz" postgres15.sql.gz + - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "postgres15-postgres-1/${DATE}/postgres15-postgres-1_${TIME}.sql.gz" postgres15.sql.gz - awslocal s3api list-objects --bucket "${BUCKET_NAME}" - sleep 1m - DATE=`date "+%Y-%m-%d"` - TIME=`date "+%Y%m%dT%H%M"` - - awslocal 
s3api get-object --bucket "${BUCKET_NAME}" --key "postgres15_postgres_1/${DATE}/postgres15_postgres_1_${TIME}.sql.gz" postgres15.sql.gz + - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "postgres15-postgres-1/${DATE}/postgres15-postgres-1_${TIME}.sql.gz" postgres15.sql.gz - awslocal s3api list-objects --bucket "${BUCKET_NAME}" - - docker-compose --file tests/docker-compose-backup-minute.yml exec backup cat /var/log/cron.log - - docker-compose --file tests/docker-compose-backup-minute.yml exec backup cat /var/log/cron.error.log + - docker compose --file tests/docker-compose-backup-minute.yml exec backup cat /var/log/cron.log + - docker compose --file tests/docker-compose-backup-minute.yml exec backup cat /var/log/cron.error.log - name: backup once when erasing the SCHEDULE variable + before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-minute.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-minute.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" script: - - docker-compose --file tests/docker-compose-backup-minute.yml run -e SCHEDULE= backup | tee backup.log + - docker compose --file tests/docker-compose-backup-minute.yml run --rm -e SCHEDULE= backup 2>&1 | tee backup.log - cat backup.log | grep 'There is not SCHEDULE variable' - name: exit with SCHEDULE wrong format + before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-minute.yml build + - 
docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-minute.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" script: - - (docker-compose --file tests/docker-compose-backup-minute.yml run -e SCHEDULE="0 * *" backup > backup.log) || (echo "The exit code is $?." | tee error.log) + - (docker compose --file tests/docker-compose-backup-minute.yml run --rm -e SCHEDULE="0 * *" backup > backup.log 2>&1) || (echo "The exit code is $?." | tee error.log) - cat backup.log - cat backup.log | grep 'The SCHEDULE variable should contain five fields exactly.' - cat error.log | grep 'The exit code is 1.' - name: GPG encrypt with asymmetric key + before_install: *before_install_aws env: - GPG_PRIVATE_KEY_PATH=tests/gpg-key/ithuan.tw.asc - GPG_PRIVATE_KEY_PASSPHRASE_PATH=tests/gpg-key/ithuan.tw.passphrase install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-encrypt.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-encrypt.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" script: @@ -113,225 +143,235 @@ jobs: - echo "GPG_PUBLIC_KEY=${GPG_PUBLIC_KEY}" | tee tests/.env - DATE=`date "+%Y-%m-%d"` - TIME=`date "+%Y%m%dT%H%M"` - - docker-compose --file tests/docker-compose-backup-encrypt.yml run --rm backup + - docker compose --file tests/docker-compose-backup-encrypt.yml run --rm backup - sleep 10 - - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key 
"postgres15_postgres_1/${DATE}/postgres15_postgres_1_${TIME}.sql.gz.gpg" postgres15.sql.gz.gpg + - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "postgres15-postgres-1/${DATE}/postgres15-postgres-1_${TIME}.sql.gz.gpg" postgres15.sql.gz.gpg - awslocal s3api list-objects --bucket "${BUCKET_NAME}" - gpg --decrypt --batch --pinentry-mode loopback --passphrase-file ${GPG_PRIVATE_KEY_PASSPHRASE_PATH} postgres15.sql.gz.gpg | zcat - name: backup strategy for keeping backup in 72 hours(3 days) + before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-strategy.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-strategy.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" before_script: - TARGET_TIME=`date "+%Y-%m-%d %H:%M" --date '3 days ago'` - DATE=`date "+%Y-%m-%d" --date "${TARGET_TIME}"` - TIME=`date "+%Y%m%dT%H%M" --date "${TARGET_TIME}"` - - OLD_BACKUP="postgres15_postgres_1/${DATE}/postgres15_postgres_1_${TIME}.sql.gz" + - OLD_BACKUP="postgres15-postgres-1/${DATE}/postgres15-postgres-1_${TIME}.sql.gz" script: - echo '# SQL' | gzip > test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${OLD_BACKUP}" --body test.sql.gz - - docker-compose --file tests/docker-compose-backup-strategy.yml run --rm backup + - docker compose --file tests/docker-compose-backup-strategy.yml run --rm backup - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "${OLD_BACKUP}" postgres15.sql.gz - awslocal s3api list-objects --bucket "${BUCKET_NAME}" - name: backup strategy for deleting backup exceeding 72 hours(3 days) + before_install: 
*before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-strategy.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-strategy.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" before_script: - TARGET_TIME=`date "+%Y-%m-%d %H:%M" --date '3 days ago 1 hour ago'` - DATE=`date "+%Y-%m-%d" --date "${TARGET_TIME}"` - TIME=`date "+%Y%m%dT%H%M" --date "${TARGET_TIME}"` - - OLD_BACKUP="postgres15_postgres_1/${DATE}/postgres15_postgres_1_${TIME}.sql.gz" + - OLD_BACKUP="postgres15-postgres-1/${DATE}/postgres15-postgres-1_${TIME}.sql.gz" - DAY_TARGET_TIME=`date "+%Y-%m-%d" --date "${TARGET_TIME}"` - DAY_DATE=`date "+%Y-%m-%d" --date "${DAY_TARGET_TIME}"` - DAY_TIME=`date "+%Y%m%dT%H%M" --date "${DAY_TARGET_TIME}"` - - DAY_BACKUP="postgres15_postgres_1/${DAY_DATE}/postgres15_postgres_1_${DAY_TIME}.sql.gz" + - DAY_BACKUP="postgres15-postgres-1/${DAY_DATE}/postgres15-postgres-1_${DAY_TIME}.sql.gz" script: - echo '# SQL' | gzip > test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${DAY_BACKUP}" --body test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${OLD_BACKUP}" --body test.sql.gz - - docker-compose --file tests/docker-compose-backup-strategy.yml run --rm backup + - docker compose --file tests/docker-compose-backup-strategy.yml run --rm backup - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "${OLD_BACKUP}" postgres15.sql.gz || (echo 'Not found.' | tee error.log) - cat error.log | grep 'Not found.' 
- awslocal s3api list-objects --bucket "${BUCKET_NAME}" - name: backup strategy for keeping day backup in 90 days(3 months) + before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-strategy.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-strategy.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" before_script: - TARGET_TIME=`date "+%Y-%m-01" --date '3 months ago'` - DATE=`date "+%Y-%m-%d" --date "${TARGET_TIME}"` - TIME=`date "+%Y%m%dT%H%M" --date "${TARGET_TIME}"` - - OLD_BACKUP="postgres15_postgres_1/${DATE}/postgres15_postgres_1_${TIME}.sql.gz" + - OLD_BACKUP="postgres15-postgres-1/${DATE}/postgres15-postgres-1_${TIME}.sql.gz" script: - echo '# SQL' | gzip > test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${OLD_BACKUP}" --body test.sql.gz - - docker-compose --file tests/docker-compose-backup-strategy.yml run --rm backup + - docker compose --file tests/docker-compose-backup-strategy.yml run --rm backup - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "${OLD_BACKUP}" postgres15.sql.gz - awslocal s3api list-objects --bucket "${BUCKET_NAME}" - name: backup strategy for keeping day backups if they are backuped in different day. 
+ before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-strategy.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-strategy.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" before_script: - TARGET1_TIME=`date "+%Y-%m-01" --date '2 months ago'` - DATE1=`date "+%Y-%m-%d" --date "${TARGET1_TIME}"` - TIME1=`date "+%Y%m%dT%H%M" --date "${TARGET1_TIME}"` - - DAY1_BACKUP="postgres15_postgres_1/${DATE1}/postgres15_postgres_1_${TIME1}.sql.gz" + - DAY1_BACKUP="postgres15-postgres-1/${DATE1}/postgres15-postgres-1_${TIME1}.sql.gz" - TARGET2_TIME=`date "+%Y-%m-02" --date '2 months ago'` - DATE2=`date "+%Y-%m-%d" --date "${TARGET2_TIME}"` - TIME2=`date "+%Y%m%dT%H%M" --date "${TARGET2_TIME}"` - - DAY2_BACKUP="postgres15_postgres_1/${DATE2}/postgres15_postgres_1_${TIME2}.sql.gz" + - DAY2_BACKUP="postgres15-postgres-1/${DATE2}/postgres15-postgres-1_${TIME2}.sql.gz" script: - echo '# SQL' | gzip > test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${DAY1_BACKUP}" --body test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${DAY2_BACKUP}" --body test.sql.gz - - docker-compose --file tests/docker-compose-backup-strategy.yml run --rm backup + - docker compose --file tests/docker-compose-backup-strategy.yml run --rm backup - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "${DAY1_BACKUP}" postgres15-1.sql.gz - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "${DAY2_BACKUP}" postgres15-2.sql.gz - awslocal s3api list-objects --bucket "${BUCKET_NAME}" - name: backup strategy for deleting backup because of 
keeping day backup only + before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-strategy.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-strategy.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" before_script: - TARGET1_TIME="`date "+%Y-%m-01 09:00" --date '3 months ago'`" - DATE1=`date "+%Y-%m-%d" --date "${TARGET1_TIME}"` - TIME1=`date "+%Y%m%dT%H%M" --date "${TARGET1_TIME}"` - - DAY_BACKUP="postgres15_postgres_1/${DATE1}/postgres15_postgres_1_${TIME1}.sql.gz" + - DAY_BACKUP="postgres15-postgres-1/${DATE1}/postgres15-postgres-1_${TIME1}.sql.gz" - TARGET2_TIME="`date "+%Y-%m-01 10:00" --date '3 months ago'`" - DATE2=`date "+%Y-%m-%d" --date "${TARGET2_TIME}"` - TIME2=`date "+%Y%m%dT%H%M" --date "${TARGET2_TIME}"` - - NOT_DAY_BACKUP="postgres15_postgres_1/${DATE2}/postgres15_postgres_1_${TIME2}.sql.gz" + - NOT_DAY_BACKUP="postgres15-postgres-1/${DATE2}/postgres15-postgres-1_${TIME2}.sql.gz" script: - echo '# SQL' | gzip > test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${DAY_BACKUP}" --body test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${NOT_DAY_BACKUP}" --body test.sql.gz - - docker-compose --file tests/docker-compose-backup-strategy.yml run --rm backup + - docker compose --file tests/docker-compose-backup-strategy.yml run --rm backup - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "${NOT_DAY_BACKUP}" postgres15.sql.gz || (echo 'Not found.' | tee error.log) - cat error.log | grep 'Not found.' 
- awslocal s3api list-objects --bucket "${BUCKET_NAME}" - name: backup strategy for deleting day backup exceeding 90 days(3 months) + before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-strategy.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-strategy.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" before_script: - TARGET_TIME=`date "+%Y-%m-10" --date '5 months ago'` - DATE=`date "+%Y-%m-%d" --date "${TARGET_TIME}"` - TIME=`date "+%Y%m%dT%H%M" --date "${TARGET_TIME}"` - - OLD_BACKUP="postgres15_postgres_1/${DATE}/postgres15_postgres_1_${TIME}.sql.gz" + - OLD_BACKUP="postgres15-postgres-1/${DATE}/postgres15-postgres-1_${TIME}.sql.gz" - MONTH_TARGET_TIME=`date "+%Y-%m-01" --date "${TARGET_TIME}"` - MONTH_DATE=`date "+%Y-%m-%d" --date "${MONTH_TARGET_TIME}"` - MONTH_TIME=`date "+%Y%m%dT%H%M" --date "${MONTH_TARGET_TIME}"` - - MONTH_BACKUP="postgres15_postgres_1/${MONTH_DATE}/postgres15_postgres_1_${MONTH_TIME}.sql.gz" + - MONTH_BACKUP="postgres15-postgres-1/${MONTH_DATE}/postgres15-postgres-1_${MONTH_TIME}.sql.gz" script: - echo '# SQL' | gzip > test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${MONTH_BACKUP}" --body test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${OLD_BACKUP}" --body test.sql.gz - - docker-compose --file tests/docker-compose-backup-strategy.yml run --rm backup + - docker compose --file tests/docker-compose-backup-strategy.yml run --rm backup - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "${OLD_BACKUP}" postgres15.sql.gz || (echo 'Not found.' 
| tee error.log) - cat error.log | grep 'Not found.' - awslocal s3api list-objects --bucket "${BUCKET_NAME}" - name: backup strategy for keeping month backup in 36 months(3 years) + before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-strategy.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-strategy.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" before_script: - TARGET_TIME=`date "+%Y-%m-01" --date '3 years ago'` - DATE=`date "+%Y-%m-%d" --date "${TARGET_TIME}"` - TIME=`date "+%Y%m%dT%H%M" --date "${TARGET_TIME}"` - - OLD_BACKUP="postgres15_postgres_1/${DATE}/postgres15_postgres_1_${TIME}.sql.gz" + - OLD_BACKUP="postgres15-postgres-1/${DATE}/postgres15-postgres-1_${TIME}.sql.gz" script: - echo '# SQL' | gzip > test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${OLD_BACKUP}" --body test.sql.gz - - docker-compose --file tests/docker-compose-backup-strategy.yml run --rm backup + - docker compose --file tests/docker-compose-backup-strategy.yml run --rm backup - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "${OLD_BACKUP}" postgres15.sql.gz - awslocal s3api list-objects --bucket "${BUCKET_NAME}" - name: backup strategy for keeping month backups if they are backuped in different month. 
+ before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-strategy.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-strategy.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" before_script: - TARGET1_TIME=`date "+%Y-01-01" --date '1 years ago'` - DATE1=`date "+%Y-%m-%d" --date "${TARGET1_TIME}"` - TIME1=`date "+%Y%m%dT%H%M" --date "${TARGET1_TIME}"` - - MONTH1_BACKUP="postgres15_postgres_1/${DATE1}/postgres15_postgres_1_${TIME1}.sql.gz" + - MONTH1_BACKUP="postgres15-postgres-1/${DATE1}/postgres15-postgres-1_${TIME1}.sql.gz" - TARGET2_TIME=`date "+%Y-02-02" --date '1 years ago'` - DATE2=`date "+%Y-%m-%d" --date "${TARGET2_TIME}"` - TIME2=`date "+%Y%m%dT%H%M" --date "${TARGET2_TIME}"` - - MONTH2_BACKUP="postgres15_postgres_1/${DATE2}/postgres15_postgres_1_${TIME2}.sql.gz" + - MONTH2_BACKUP="postgres15-postgres-1/${DATE2}/postgres15-postgres-1_${TIME2}.sql.gz" script: - echo '# SQL' | gzip > test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${MONTH1_BACKUP}" --body test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${MONTH2_BACKUP}" --body test.sql.gz - - docker-compose --file tests/docker-compose-backup-strategy.yml run --rm backup + - docker compose --file tests/docker-compose-backup-strategy.yml run --rm backup - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "${MONTH1_BACKUP}" postgres15-1.sql.gz - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "${MONTH2_BACKUP}" postgres15-2.sql.gz - awslocal s3api list-objects --bucket "${BUCKET_NAME}" - name: backup strategy for deleting backup 
because of keeping month backup only + before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-strategy.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-strategy.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" before_script: - TARGET1_TIME="`date "+%Y-%m-01 09:00" --date '10 months ago'`" - DATE1=`date "+%Y-%m-%d" --date "${TARGET1_TIME}"` - TIME1=`date "+%Y%m%dT%H%M" --date "${TARGET1_TIME}"` - - MONTH_BACKUP="postgres15_postgres_1/${DATE1}/postgres15_postgres_1_${TIME1}.sql.gz" + - MONTH_BACKUP="postgres15-postgres-1/${DATE1}/postgres15-postgres-1_${TIME1}.sql.gz" - TARGET2_TIME="`date "+%Y-%m-20 09:00" --date '10 months ago'`" - DATE2=`date "+%Y-%m-%d" --date "${TARGET2_TIME}"` - TIME2=`date "+%Y%m%dT%H%M" --date "${TARGET2_TIME}"` - - NOT_MONTH_BACKUP="postgres15_postgres_1/${DATE2}/postgres15_postgres_1_${TIME2}.sql.gz" + - NOT_MONTH_BACKUP="postgres15-postgres-1/${DATE2}/postgres15-postgres-1_${TIME2}.sql.gz" script: - echo '# SQL' | gzip > test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${MONTH_BACKUP}" --body test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${NOT_MONTH_BACKUP}" --body test.sql.gz - - docker-compose --file tests/docker-compose-backup-strategy.yml run --rm backup + - docker compose --file tests/docker-compose-backup-strategy.yml run --rm backup - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "${NOT_MONTH_BACKUP}" postgres15.sql.gz || (echo 'Not found.' | tee error.log) - cat error.log | grep 'Not found.' 
- awslocal s3api list-objects --bucket "${BUCKET_NAME}" - name: backup strategy for deleting month backup exceeding 36 months(3 years) + before_install: *before_install_aws install: - - docker-compose --file tests/postgres15/docker-compose.yml up --detach - - docker-compose --file tests/s3/docker-compose-localstack.yml up --detach - - docker-compose --file tests/docker-compose-backup-strategy.yml build + - docker compose --file tests/postgres15/docker-compose.yml up --detach --quiet-pull + - docker compose --file tests/s3/docker-compose-localstack.yml up --detach --quiet-pull + - docker compose --file tests/docker-compose-backup-strategy.yml build --quiet - sleep 10 - awslocal s3api create-bucket --bucket "${BUCKET_NAME}" before_script: - TARGET_TIME=`date "+%Y-%m-01" --date '3 years ago 1 months ago'` - DATE=`date "+%Y-%m-%d" --date "${TARGET_TIME}"` - TIME=`date "+%Y%m%dT%H%M" --date "${TARGET_TIME}"` - - OLD_BACKUP="postgres15_postgres_1/${DATE}/postgres15_postgres_1_${TIME}.sql.gz" + - OLD_BACKUP="postgres15-postgres-1/${DATE}/postgres15-postgres-1_${TIME}.sql.gz" script: - echo '# SQL' | gzip > test.sql.gz - awslocal s3api put-object --bucket "${BUCKET_NAME}" --key "${OLD_BACKUP}" --body test.sql.gz - - docker-compose --file tests/docker-compose-backup-strategy.yml run --rm backup + - docker compose --file tests/docker-compose-backup-strategy.yml run --rm backup - awslocal s3api get-object --bucket "${BUCKET_NAME}" --key "${OLD_BACKUP}" postgres15.sql.gz || (echo 'Not found.' | tee error.log) - cat error.log | grep 'Not found.' 
- awslocal s3api list-objects --bucket "${BUCKET_NAME}" diff --git a/Dockerfile b/Dockerfile index d095583..81224dd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,29 +1,28 @@ -FROM ubuntu:latest +FROM ubuntu:24.04 +# https://github.com/aws/aws-cli/blob/v2/CHANGELOG.rst?plain=1 +RUN groupadd --system --gid 138 docker ARG DEBIAN_FRONTEND=noninteractive - RUN apt update && \ - apt install -y \ - ca-certificates \ - curl \ - gnupg \ - lsb-release && \ - mkdir -m 0755 -p /etc/apt/keyrings && \ - curl -fsSL https://download.docker.com/linux/ubuntu/gpg \ - | gpg --dearmor -o /etc/apt/keyrings/docker.gpg && \ - echo \ - "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \ - $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null && \ - apt-get update && \ - apt install -y docker-ce-cli cron gnupg jq - -RUN apt install -y unzip && \ + apt install --no-install-recommends -y \ + docker.io cron gnupg jq \ + curl ca-certificates unzip && \ + apt-get clean && \ mkdir /aws_build/ && \ - curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "/aws_build/awscliv2.zip" && \ + update-ca-certificates && \ + curl -sSf "https://awscli.amazonaws.com/awscli-exe-linux-x86_64-2.22.35.zip" -o "/aws_build/awscliv2.zip" && \ unzip -q /aws_build/awscliv2.zip -d /aws_build/ && \ /aws_build/aws/install && \ rm -rf /aws_build/ +RUN useradd --uid 1001 nonroot --user-group && \ + usermod -aG docker nonroot && \ + touch /etc/environment && \ + chown nonroot:nonroot /etc/environment && \ + chmod u+s /usr/sbin/cron + +WORKDIR /app/ COPY scripts/ /app/ -CMD bash /app/start.sh +USER nonroot +CMD ["bash", "/app/start.sh"] diff --git a/README.md b/README.md index 79bd7ef..732500a 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ Backup databases from dockerized PostgresSQL to any S3-compatible storage with a ## Quick Start -``` +```yaml version: '3' services: postgres: @@ -46,7 +46,7 @@ 
services: ### Backup Multiple Databases Simultaneously -To backup multiple databases simultaneously, you can label the database containers that require backup with the `backup.postgres=true` label. The backup script will then identify all containers with this label and execute the backup command for each of them. +To backup multiple databases simultaneously, you can label the database containers that require backup with the `backup.postgres=true` label. The backup script will then identify all containers with this label and execute the backup command for each of them. ### Easy Configuration @@ -75,6 +75,7 @@ The codebase undergoes automatic testing using Travis CI, which covers backup sc ## Configuration ### S3 Storage Configurations + - `S3_ENDPOINT_URL` (required): The S3 endpoint URL in the form of `http:///` or `https:/// `. Note that the scheme should be included. - `S3_REGION`: The name of the S3 region (eg. `eu-west-1`). This may be optional depending on your storage vendor. @@ -86,7 +87,6 @@ The codebase undergoes automatic testing using Travis CI, which covers backup sc - `SCHEDULE`: The backup schedule specified in a string following [crontab syntax](https://www.man7.org/linux/man-pages/man5/crontab.5.html) where the five fields are minute, hour, day of month, month and day of week. If set to a blank string, the script will perform a instant backup and exit. The default value is a blank string. - ### GPG Key - `GPG_PUBLIC_KEY`: Base64-encoded GPG public key used in the encryption process. If not set, backup files will be uploaded and saved un-encrypted. @@ -95,11 +95,14 @@ The codebase undergoes automatic testing using Travis CI, which covers backup sc 1. [Generate a new GPG key](https://docs.github.com/en/authentication/managing-commit-signature-verification/generating-a-new-gpg-key) if there is not any existing GPG key. 2. Encode GPG public key in base64 format and write it into the `.env` file. 
+ ```bash GPG_PUBLIC_KEY=`gpg --armor --export | base64 --wrap 0` echo "GPG_PUBLIC_KEY=${GPG_PUBLIC_KEY}" > .env ``` + 3. Export the private key and store it securely. The private key is needed when decrypting a backup file. + ```bash gpg --export-secret-keys --armor > ``` @@ -107,10 +110,13 @@ gpg --export-secret-keys --armor > #### Decrypt a Backup File 1. Import the gpg private key if it hasn't been imported yet. + ```bash gpg --import ``` + 2. Decrypt the backup file to get the original SQL. + ```bash gpg --decrypt | zcat ``` diff --git a/requirements_travisci.in b/requirements_travisci.in new file mode 100644 index 0000000..d13a290 --- /dev/null +++ b/requirements_travisci.in @@ -0,0 +1,4 @@ +awscli +boto3~=1.35.0 + +awscli-local diff --git a/requirements_travisci.txt b/requirements_travisci.txt new file mode 100644 index 0000000..5f37f7e --- /dev/null +++ b/requirements_travisci.txt @@ -0,0 +1,45 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile requirements_travisci.in +# +awscli==1.36.40 + # via -r requirements_travisci.in +awscli-local==0.22.0 + # via -r requirements_travisci.in +boto3==1.35.99 + # via + # -r requirements_travisci.in + # localstack-client +botocore==1.35.99 + # via + # awscli + # boto3 + # s3transfer +colorama==0.4.6 + # via awscli +docutils==0.16 + # via awscli +jmespath==1.0.1 + # via + # boto3 + # botocore +localstack-client==2.10 + # via awscli-local +pyasn1==0.6.1 + # via rsa +python-dateutil==2.9.0.post0 + # via botocore +pyyaml==6.0.2 + # via awscli +rsa==4.7.2 + # via awscli +s3transfer==0.10.4 + # via + # awscli + # boto3 +six==1.17.0 + # via python-dateutil +urllib3==2.5.0 + # via botocore diff --git a/scripts/backup.sh b/scripts/backup.sh index 64eabd8..15a09a1 100755 --- a/scripts/backup.sh +++ b/scripts/backup.sh @@ -23,7 +23,7 @@ all_containers=`docker container list \ for postgres_container_name in $all_containers do >&2 echo "Backuping 
${postgres_container_name} is starting." - FILE_PATH=$(filepath ${postgres_container_name} 'now') + FILE_PATH=$(filepath "${postgres_container_name}" 'now') docker exec "${postgres_container_name}" pg_dump -U postgres \ | gzip \ | ${ENCRYPT_COMMAND} \ diff --git a/scripts/cleanup.sh b/scripts/cleanup.sh index ebff041..f9a3901 100755 --- a/scripts/cleanup.sh +++ b/scripts/cleanup.sh @@ -5,7 +5,7 @@ source /app/filepath.sh CONTAINER_NAME=$1 -FILE_PATH=$(filepath ${CONTAINER_NAME} "${MAX_PERIOD_IN_HOURS_TO_KEEP_EVERY_BACKUPS} hours ago") +FILE_PATH=$(filepath "${CONTAINER_NAME}" "${MAX_PERIOD_IN_HOURS_TO_KEEP_EVERY_BACKUPS} hours ago") temp_dir=$(mktemp -d) ALL_FILES="${temp_dir}/all.list" @@ -29,7 +29,7 @@ aws s3api list-objects-v2 \ for day in $(seq 1 "${MAX_PERIOD_IN_DAYS_TO_KEEP_DAILY_BACKUPS}") do TARGET_DAY=`date "+%Y-%m-%d" --date "${day} days ago"` - FILE_PATH=$(filepath ${CONTAINER_NAME} "${TARGET_DAY}") + FILE_PATH=$(filepath "${CONTAINER_NAME}" "${TARGET_DAY}") aws s3api list-objects-v2 \ --endpoint-url "${S3_ENDPOINT_URL}" \ --bucket "${S3_BUCKET}" \ @@ -43,7 +43,7 @@ done for month in $(seq 1 "${MAX_PERIOD_IN_MONTHS_TO_KEEP_MONTHLY_BACKUPS}") do TARGET_DAY=`date "+%Y-%m-01" --date "${month} months ago"` - FILE_PATH=$(filepath ${CONTAINER_NAME} "${TARGET_DAY}") + FILE_PATH=$(filepath "${CONTAINER_NAME}" "${TARGET_DAY}") aws s3api list-objects-v2 \ --endpoint-url "${S3_ENDPOINT_URL}" \ --bucket "${S3_BUCKET}" \ @@ -54,14 +54,18 @@ do >> "${PRESERVE_FILES}" done -for filename in `cat "${ALL_FILES}" \ - | grep --invert-match --line-regexp --file "${PRESERVE_FILES}" \ - | sed 's/^"\(.*\)"$/\1/g'` +sort -u "${ALL_FILES}" > "${temp_dir}/tmp.list" +mv "${temp_dir}/tmp.list" "${ALL_FILES}" +sort -u "${PRESERVE_FILES}" > "${temp_dir}/tmp.list" +mv "${temp_dir}/tmp.list" "${PRESERVE_FILES}" +comm -23 "${ALL_FILES}" "${PRESERVE_FILES}" \ + | sed 's/^[[:space:]]*"\(.*\)"$/\1/g' \ + | while IFS= read -r filename do aws s3api delete-object \ --endpoint-url 
"${S3_ENDPOINT_URL}" \ --bucket "${S3_BUCKET}" \ - --key $(unknown) + --key "$(unknown)" done rm -rf "${temp_dir}" diff --git a/scripts/crontab.sh b/scripts/crontab.sh index dd67cf5..abaedb4 100644 --- a/scripts/crontab.sh +++ b/scripts/crontab.sh @@ -2,7 +2,7 @@ set -euo pipefail echo "PATH=${PATH} -${SCHEDULE} bash /app/backup.sh >> /var/log/cron.log 2>> /var/log/cron.error.log" \ +${SCHEDULE} bash /app/backup.sh" \ | crontab - cron -f -L 15 diff --git a/scripts/filepath.sh b/scripts/filepath.sh index 5690506..0f06e02 100644 --- a/scripts/filepath.sh +++ b/scripts/filepath.sh @@ -3,9 +3,13 @@ set -euo pipefail function filepath() { - local container_name="$1" - local target_time="$2" - local DATE=`date "+%Y-%m-%d" --date "${target_time}"` - local TIME=`date "+%Y%m%dT%H%M" --date "${target_time}"` + local container_name + local target_time + local DATE + local TIME + container_name="$1" + target_time="$2" + DATE=`date "+%Y-%m-%d" --date "${target_time}"` + TIME=`date "+%Y%m%dT%H%M" --date "${target_time}"` echo "${container_name}/${DATE}/${container_name}_${TIME}" } diff --git a/scripts/start.sh b/scripts/start.sh index ad3b5bb..daf05e0 100644 --- a/scripts/start.sh +++ b/scripts/start.sh @@ -31,7 +31,7 @@ if [ -z "${GPG_PUBLIC_KEY}" ]; then >&2 echo 'There is not a GPG_PUBLIC_KEY, all backup files will not be encrypted.' else >&2 echo 'There is the GPG_PUBLIC_KEY, all backup files will be encrypted.' - echo ${GPG_PUBLIC_KEY} | base64 -d > ${GPG_PUBLIC_KEY_PATH} + echo "${GPG_PUBLIC_KEY}" | base64 -d > "${GPG_PUBLIC_KEY_PATH}" fi if [ -z "${SCHEDULE}" ]; then >&2 echo "multiple-databases-backup is starting." diff --git a/shellcheck.sh b/shellcheck.sh new file mode 100644 index 0000000..7542ba1 --- /dev/null +++ b/shellcheck.sh @@ -0,0 +1,12 @@ +#!/bin/bash +exit_code=0 +shellcheck --severity=info scripts/*; +tsitkai="$?" +exit_code=$(( tsitkai != 0 ? tsitkai : exit_code)) +while IFS= read -r -d '' file +do + shellcheck --severity=info "$file"; + tsitkai="$?" 
+ exit_code=$(( tsitkai != 0 ? tsitkai : exit_code)) +done < <(find . -type f -name '*.sh' -not -path './venv/*' -not -path './scripts/*' -print0) +exit $(( exit_code == 0 ? 0 : 1)) diff --git a/tests/docker-compose-backup-encrypt.yml b/tests/docker-compose-backup-encrypt.yml index 72cc381..1b019a0 100644 --- a/tests/docker-compose-backup-encrypt.yml +++ b/tests/docker-compose-backup-encrypt.yml @@ -1,5 +1,4 @@ --- -version: '3' services: backup: build: ../ diff --git a/tests/docker-compose-backup-minute.yml b/tests/docker-compose-backup-minute.yml index 57cf0ca..70c2f41 100644 --- a/tests/docker-compose-backup-minute.yml +++ b/tests/docker-compose-backup-minute.yml @@ -1,5 +1,4 @@ --- -version: '3' services: backup: build: ../ diff --git a/tests/docker-compose-backup-strategy.yml b/tests/docker-compose-backup-strategy.yml index 538c2ec..2dc45fc 100644 --- a/tests/docker-compose-backup-strategy.yml +++ b/tests/docker-compose-backup-strategy.yml @@ -1,5 +1,4 @@ --- -version: '3' services: backup: build: ../ diff --git a/tests/docker-compose-backup.yml b/tests/docker-compose-backup.yml index e23213a..c88b876 100644 --- a/tests/docker-compose-backup.yml +++ b/tests/docker-compose-backup.yml @@ -1,5 +1,4 @@ --- -version: '3' services: backup: build: ../ diff --git a/tests/postgres15/docker-compose.yml b/tests/postgres15/docker-compose.yml index 3a2393f..b8587c5 100644 --- a/tests/postgres15/docker-compose.yml +++ b/tests/postgres15/docker-compose.yml @@ -1,5 +1,4 @@ --- -version: '3' services: postgres: image: postgres:15 diff --git a/tests/s3/docker-compose-localstack.yml b/tests/s3/docker-compose-localstack.yml index c7cde06..4c04a9e 100644 --- a/tests/s3/docker-compose-localstack.yml +++ b/tests/s3/docker-compose-localstack.yml @@ -1,5 +1,4 @@ --- -version: '3' services: localstack: container_name: "${LOCALSTACK_DOCKER_NAME-localstack_main}" diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..559d66c --- /dev/null +++ b/tox.ini @@ -0,0 +1,36 @@ +[tox] 
+skipsdist = True + +[flake8] +max-line-length = 79 +exclude = + .git + .tox + venv + +[testenv:yamllint] +deps = + yamllint +commands = + yamllint . + +[testenv:flake8] +deps = + flake8 +commands = + flake8 . --show-source --count + +[testenv:pymarkdown] +deps = + pymarkdownlnt +commands = + pymarkdown \ + --strict-config \ + --disable-rules md013,md029 \ + scan . + +[testenv:shellcheck] +allowlist_externals = + bash +commands = + bash shellcheck.sh