name: weekly-db-backup

on:
  schedule:
    - cron: "0 5 * * 0" # Weekly backup: 05:00 UTC every Sunday (GitHub cron runs in UTC, so midnight/1 am ET depending on DST)
  workflow_dispatch:
jobs:
  db-backup:
    runs-on: ubuntu-latest
    permissions:
      id-token: write
    env:
      RETENTION: 180 # 6 months of backups
      DATABASE_URL: ${{ secrets.DATABASE_URL }}
      IAM_ROLE: ${{ secrets.IAM_ROLE }}
      AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
      S3_BUCKET_NAME: ${{ secrets.S3_BUCKET_NAME }}
      AWS_REGION: "us-east-1"
      PG_VERSION: "15"
    steps:
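      # Install the PostgreSQL client tools from the PGDG apt repository so that
      # pg_dump matches the server's major version (15 here); the pgdg setup
      # script prompts once, and `yes ''` answers it non-interactively.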
      - name: Install PostgreSQL
        run: |
          sudo apt update
          yes '' | sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh
          sudo apt install -y postgresql-${{ env.PG_VERSION }}
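      # apt puts version-specific binaries under /usr/lib/postgresql/<ver>/bin,
      # which is not on PATH; export the path once so later steps can call
      # $POSTGRES/pg_dump.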
      - name: Set PostgreSQL binary path
        run: echo "POSTGRES=/usr/lib/postgresql/${{ env.PG_VERSION }}/bin" >> "$GITHUB_ENV"
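      # Exchange the job's GitHub OIDC token (enabled by `id-token: write` above)
      # for short-lived AWS credentials by assuming the IAM role; the role's
      # trust policy must allow GitHub's OIDC provider and this repo's subject.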
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::${{ env.AWS_ACCOUNT_ID }}:role/${{ env.IAM_ROLE }}
          aws-region: ${{ env.AWS_REGION }}
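      # Derive the object key once and persist it via $GITHUB_ENV so every later
      # step refers to the same timestamped file,
      # e.g. s3://<bucket>/weekly-db-backup/20240107@050312.sql.gz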
      - name: Set file, folder and path variables
        run: |
          GZIP_NAME="$(date +'%Y%m%d@%H%M%S').sql.gz"
          FOLDER_NAME="${{ github.workflow }}"
          UPLOAD_PATH="s3://${{ env.S3_BUCKET_NAME }}/${FOLDER_NAME}/${GZIP_NAME}"
          echo "GZIP_NAME=${GZIP_NAME}" >> "$GITHUB_ENV"
          echo "FOLDER_NAME=${FOLDER_NAME}" >> "$GITHUB_ENV"
          echo "UPLOAD_PATH=${UPLOAD_PATH}" >> "$GITHUB_ENV"
      - name: Create folder if it doesn't exist
        run: |
          if ! aws s3api head-object --bucket "${{ env.S3_BUCKET_NAME }}" --key "${{ env.FOLDER_NAME }}/" 2>/dev/null; then
            aws s3api put-object --bucket "${{ env.S3_BUCKET_NAME }}" --key "${{ env.FOLDER_NAME }}/"
          fi
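      # Stream the dump straight through gzip so the uncompressed SQL never
      # touches disk. DATABASE_URL is read as a shell variable rather than
      # interpolated with ${{ }}, keeping the secret out of the script text.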
      - name: Run pg_dump
        run: |
          set -o pipefail # fail the step if pg_dump fails, not just gzip
          "$POSTGRES"/pg_dump "$DATABASE_URL" | gzip > "${{ env.GZIP_NAME }}"
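      # Enforce the retention window before uploading: list the .gz objects under
      # this workflow's prefix whose LastModified predates the threshold (JMESPath
      # filter) and delete them. list-objects returns at most 1,000 keys per call,
      # which comfortably covers ~26 weekly backups over 180 days.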
      - name: Empty bucket of old files
        run: |
          THRESHOLD_DATE=$(date -d "-${{ env.RETENTION }} days" +%Y-%m-%dT%H:%M:%SZ)
          aws s3api list-objects --bucket "${{ env.S3_BUCKET_NAME }}" --prefix "${{ env.FOLDER_NAME }}/" \
            --query "Contents[?LastModified<'${THRESHOLD_DATE}'] | [?ends_with(Key, '.gz')].{Key: Key}" \
            --output text | while read -r file; do
            if [ -n "$file" ] && [ "$file" != "None" ]; then # --output text prints "None" when nothing matches
              aws s3 rm "s3://${{ env.S3_BUCKET_NAME }}/${file}"
            fi
          done
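      # Finally push the new backup to the timestamped key computed earlier.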
      - name: Upload to bucket
        run: |
          aws s3 cp "${{ env.GZIP_NAME }}" "${{ env.UPLOAD_PATH }}" --region "${{ env.AWS_REGION }}"