Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Pg 15 gcs #4

Open
wants to merge 15 commits into
base: master
Choose a base branch
from
14 changes: 12 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,7 +1,17 @@
# Minimal Alpine image with the PostgreSQL 15 client tools and the Google
# Cloud SDK, used to snapshot a Postgres database and upload it to GCS.
FROM alpine:3.18

# Single --no-cache layer: merges the former "apk add --update" and
# "apk update && apk add" layers and avoids baking the apk index into
# the image. python3/curl/bash are required by the gcloud SDK installer.
RUN apk add --no-cache \
    python3 \
    curl \
    which \
    bash \
    dumb-init \
    postgresql15-client

# NOTE(review): piping a remote installer into bash executes unpinned,
# unauthenticated code at build time — consider a versioned SDK tarball
# with a checksum instead.
RUN curl -sSL https://sdk.cloud.google.com | bash

# Quoted ENV form; makes gcloud/gsutil available on PATH.
ENV PATH="$PATH:/root/google-cloud-sdk/bin"

# go-cron drives the periodic backup schedule.
RUN curl -L https://github.com/odise/go-cron/releases/download/v0.0.7/go-cron-linux.gz | zcat > /usr/local/bin/go-cron && chmod +x /usr/local/bin/go-cron

Expand Down
23 changes: 18 additions & 5 deletions backup.sh
Original file line number Diff line number Diff line change
Expand Up @@ -48,28 +48,41 @@ case "${PG_BACKUP_ACTION:-dump}" in
AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"
fi

# --- Google Cloud authentication -------------------------------------------
# The service-account key arrives base64-encoded in S3_SECRET_ACCESS_KEY and
# the project id in S3_ACCESS_KEY_ID (variable names kept for backward
# compatibility with the previous S3-based configuration).
echo "Authenticating to Google Cloud..."
# printf instead of echo: the secret could start with '-' or contain
# backslashes, which echo may interpret. Restrict the key file to root.
printf '%s' "$S3_SECRET_ACCESS_KEY" | base64 -d > /key.json
chmod 600 /key.json
gcloud auth activate-service-account --key-file /key.json --project "$S3_ACCESS_KEY_ID" -q

# NOTE(review): the AWS env exports and `aws configure set` were removed —
# aws-cli is no longer installed in the image, so they would fail at runtime.

# Remove the local dump AND the decoded service-account key on every exit
# path (previously /key.json was left on disk).
cleanup() {
  echo "Cleaning up..."
  rm -f dump.backup /key.json
}

# Fire cleanup on any exit, success or failure.
trap cleanup EXIT

# TODO: check if database is fresh
echo "Snapshotting $POSTGRES_DB database"
# POSTGRES_HOST_OPTS is intentionally unquoted: it carries several
# whitespace-separated pg_dump options (host, port, user, ...).
pg_dump -Fc $POSTGRES_HOST_OPTS "$POSTGRES_DB" > dump.backup

if [ "${PRIVATE_BACKUP}" = "true" ] || [ "${PRIVATE_BACKUP}" = "1" ]; then
  echo "Rotating old snapshot"
  # Rotation is best-effort: the object may not exist on the first run.
  gsutil cp "gs://$S3_BUCKET/$S3_PATH/$S3_FILENAME.backup" "gs://$S3_BUCKET/$S3_PATH/$S3_FILENAME.old.backup" || true

  echo "Uploading fresh private snapshot to $S3_BUCKET/$S3_PATH/$S3_FILENAME"
  # Upload the file directly — no need to stream it through cat/stdin.
  gsutil cp dump.backup "gs://$S3_BUCKET/$S3_PATH/$S3_FILENAME.backup" || exit 2
else
  echo "Rotating old snapshot"
  gsutil cp -a public-read "gs://$S3_BUCKET/$S3_PATH/$S3_FILENAME.backup" "gs://$S3_BUCKET/$S3_PATH/$S3_FILENAME.old.backup" || true

  echo "Uploading fresh public snapshot to $S3_BUCKET/$S3_PATH/$S3_FILENAME"
  gsutil cp -a public-read dump.backup "gs://$S3_BUCKET/$S3_PATH/$S3_FILENAME.backup" || exit 2
fi

echo "Snapshot uploaded successfully, removing local file"
Expand Down
13 changes: 13 additions & 0 deletions boto.config
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# gsutil/boto configuration template.
# NOTE(review): the replace_* placeholders look like they are substituted
# with real GCS HMAC credentials at container start-up — confirm against
# the entrypoint script.
[Credentials]
gs_access_key_id=replace_gs_access_key_id
gs_secret_access_key=replace_gs_secret_access_key

[Boto]

[GoogleCompute]

[GSUtil]
# Content-Language metadata applied to uploaded objects.
content_language = en
# GCS API version gsutil should use by default.
default_api_version = 2

[OAuth2]
Loading