# shellcheck shell=bash
# This script uses pg_dumpall to dump all of your databases into a
# single .sql file, gzips it, and uploads it to an S3 bucket. It also
# performs backup rotation, keeping only the latest n versions.
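# Example invocation (the script name and values below are illustrative
# placeholders, not defaults):
#   ./pg_backup.sh my-bucket eu-west-1 postgres-backups/ 7 nightly \
#     u/admin/postgres u/admin/aws-access-key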
# AWS S3 bucket name
bucket_name="${1}"
# Bucket region
region="${2}"
# Prefix aka "directory". To store backups in the bucket root,
# pass a single slash.
prefix="${3:-postgres-backups/}"
# Number of latest versions to keep. Older versions are removed
# automatically. Set to 0 to disable rotation.
num_versions_to_keep="${4:-3}"
# Custom file name; final format: `%Y%m%d%H%M%S-${filename}.sql.gz`
filename="${5:-backup}"
# The Windmill path to the postgres resource. This script expects
# the resource to contain these fields: `host`, `password`, `port`, `user`.
postgres_resource_path="${6:-u/admin/postgres}"
# The Windmill path to the AWS access key resource. This script expects
# the resource to contain these fields: `awsAccessKeyId`, `awsSecretAccessKey`.
aws_resource_path="${7:-u/admin/aws-access-key}"
prefix="${prefix#/}" # trim leading slash
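# Install missing dependencies on the fly. This assumes a Debian-based
# worker image where apt-get and pip are available.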
if ! command -v pg_dumpall >/dev/null 2>&1; then
    apt-get install -y postgresql-client
fi
if ! command -v aws >/dev/null 2>&1; then
    python -m pip install awscli
fi
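# Fetch both resources from Windmill's internal API. WM_TOKEN,
# BASE_INTERNAL_URL and WM_WORKSPACE are injected into the job's
# environment by the Windmill runtime.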
postgres=$(curl -s -H "Authorization: Bearer $WM_TOKEN" \
    "$BASE_INTERNAL_URL/api/w/$WM_WORKSPACE/resources/get_value_interpolated/$postgres_resource_path")
aws_creds=$(curl -s -H "Authorization: Bearer $WM_TOKEN" \
    "$BASE_INTERNAL_URL/api/w/$WM_WORKSPACE/resources/get_value_interpolated/$aws_resource_path")
host=$(echo "$postgres" | jq -r .host)
password=$(echo "$postgres" | jq -r .password)
port=$(echo "$postgres" | jq -r .port)
user=$(echo "$postgres" | jq -r .user)
AWS_ACCESS_KEY_ID=$(echo "$aws_creds" | jq -r .awsAccessKeyId)
AWS_SECRET_ACCESS_KEY=$(echo "$aws_creds" | jq -r .awsSecretAccessKey)
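# Export the credentials so the child processes can read them:
# pg_dumpall honors PGPASSWORD, and the aws CLI reads
# AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY from the environment.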
export PGPASSWORD="$password"
export AWS_ACCESS_KEY_ID="$AWS_ACCESS_KEY_ID"
export AWS_SECRET_ACCESS_KEY="$AWS_SECRET_ACCESS_KEY"
# Timestamp for the file name, e.g. 20231231173731
now=$(date "+%Y%m%d%H%M%S")
file_ext="sql.gz"
echo "Backing up database and uploading to ${bucket_name}/${prefix}${now}-${filename}.${file_ext}"
pg_dumpall -h "$host" -p "$port" -U "$user" \
    | gzip \
    | aws s3 cp - "s3://${bucket_name}/${prefix}${now}-${filename}.${file_ext}" --region "$region"
if [ "$num_versions_to_keep" == "0" ]; then
exit 0;
fi
# List all files in the backup directory
files=$(aws s3api list-objects --region "$region" --bucket "$bucket_name" \
    --prefix "$prefix" \
    --query 'Contents[].{Key: Key}' \
    --output text)
echo "Removing older versions (if any)..."
echo "$files" \
| grep -oP "^${prefix}[0-9]{14}-${filename}\.${file_ext}$" \
| sort -r \
| tail -n "+$((${num_versions_to_keep} + 1))" \
| sed 's/\n/\\0/g' \
| xargs --verbose -I {} aws s3 rm "s3://${bucket_name}/{}" --region "$region"
echo "success"