# shellcheck shell=bash

# This script uses pg_dumpall to dump your databases into a
# single .sql file, gzips it, and uploads it to an S3 bucket. It
# also performs backup rotation, keeping only the n latest
# versions of your choice.
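# For illustration, the positional arguments line up like this
# (script name and values are hypothetical):
#   ./backup-postgres.sh my-bucket us-east-1 postgres-backups/ 3 backup \
#     u/admin/postgres u/admin/aws-access-key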

# AWS S3 bucket name
bucket_name="${1}"

# Bucket region
region="${2}"

# Prefix aka "directory". To store backups at the bucket root,
# leave a single slash.
prefix="${3:-postgres-backups/}"

# Number of latest versions to keep. Older versions are removed
# automatically. Set the value to 0 to disable rotation.
num_versions_to_keep="${4:-3}"

# Custom file name, final format: `%Y%m%d%H%M%S-${your_filename}.sql.gz`
filename="${5:-backup}"

# The Windmill path to the postgres resource. This script expects
# the resource to contain these fields: `host`, `password`, `port`, `user`.
postgres_resource_path="${6:-u/admin/postgres}"

# The Windmill path to the AWS access key resource. This script expects
# the resource to contain these fields: `awsAccessKeyId`, `awsSecretAccessKey`.
aws_resource_path="${7:-u/admin/aws-access-key}"

prefix="${prefix#/}" # trim leading slash

if ! command -v pg_dumpall >/dev/null 2>&1; then
    apt-get install -y postgresql-client
fi

if ! command -v aws >/dev/null 2>&1; then
    python3 -m pip install awscli
fi

postgres=$(curl -s -H "Authorization: Bearer $WM_TOKEN" \
    "$BASE_INTERNAL_URL/api/w/$WM_WORKSPACE/resources/get_value_interpolated/$postgres_resource_path")

aws_creds=$(curl -s -H "Authorization: Bearer $WM_TOKEN" \
    "$BASE_INTERNAL_URL/api/w/$WM_WORKSPACE/resources/get_value_interpolated/$aws_resource_path")

host=$(echo "$postgres" | jq -r .host)
password=$(echo "$postgres" | jq -r .password)
port=$(echo "$postgres" | jq -r .port)
user=$(echo "$postgres" | jq -r .user)

AWS_ACCESS_KEY_ID=$(echo "$aws_creds" | jq -r .awsAccessKeyId)
AWS_SECRET_ACCESS_KEY=$(echo "$aws_creds" | jq -r .awsSecretAccessKey)

export PGPASSWORD="$password"
export AWS_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY

# e.g. 20231231173731
now=$(date "+%Y%m%d%H%M%S")

file_ext="sql.gz"

echo "Backing up database and uploading to ${bucket_name}/${prefix}${now}-${filename}.${file_ext}"

pg_dumpall -h "$host" -p "$port" -U "$user" \
    | gzip \
    | aws s3 cp - "s3://${bucket_name}/${prefix}${now}-${filename}.${file_ext}" --region "$region"
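
# To restore, one could stream the dump back through psql, e.g.
# (the key below is illustrative):
#   aws s3 cp "s3://${bucket_name}/${prefix}20231231173731-backup.sql.gz" - \
#     | gunzip | psql -h "$host" -p "$port" -U "$user" postgres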

if [[ "$num_versions_to_keep" == "0" ]]; then
    exit 0
fi

# List all files in the backup directory
files=$(aws s3api list-objects --region "$region" --bucket "$bucket_name" \
    --prefix "$prefix" \
    --query 'Contents[].{Key: Key}' \
    --output text)
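
# With `--output text`, each matching object key is printed on its own
# line, e.g.:
#   postgres-backups/20231231173731-backup.sql.gz
#   postgres-backups/20240101091500-backup.sql.gz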

echo "Removing older versions (if any)..."

echo "$files" \
    | grep -oP "^${prefix}[0-9]{14}-${filename}\.${file_ext}$" \
    | sort -r \
    | tail -n "+$((num_versions_to_keep + 1))" \
    | xargs --verbose -I {} aws s3 rm "s3://${bucket_name}/{}" --region "$region"
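
# Rotation logic, by example: with num_versions_to_keep=3 and five backups,
# `sort -r` orders keys newest first, `tail -n +4` drops the first three
# (the ones to keep) and emits the two oldest, which xargs then deletes.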

echo "success"