diff --git a/Dockerfile b/Dockerfile
index ad33265..06a59cd 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,17 @@
-FROM alpine:3.15
+FROM alpine:3.18
+
+RUN apk add --update \
+    python3 \
+    curl \
+    which \
+    bash
+
+RUN curl -sSL https://sdk.cloud.google.com | bash
+
+ENV PATH $PATH:/root/google-cloud-sdk/bin
 
 RUN apk update \
-  && apk --no-cache add dumb-init postgresql-client curl aws-cli
+  && apk --no-cache add dumb-init postgresql15-client curl
 
 RUN curl -L https://github.com/odise/go-cron/releases/download/v0.0.7/go-cron-linux.gz | zcat > /usr/local/bin/go-cron && chmod +x /usr/local/bin/go-cron
 
diff --git a/backup.sh b/backup.sh
index 5f2ab59..0edb5c1 100644
--- a/backup.sh
+++ b/backup.sh
@@ -48,28 +48,41 @@ case "${PG_BACKUP_ACTION:-dump}" in
      AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"
    fi
 
+   # Google Cloud Auth
+   echo "Authenticating to Google Cloud..."
+   echo $S3_SECRET_ACCESS_KEY | base64 -d > /key.json
+   gcloud auth activate-service-account --key-file /key.json --project "$S3_ACCESS_KEY_ID" -q
+
    # env vars needed for aws tools
    export AWS_ACCESS_KEY_ID=$S3_ACCESS_KEY_ID
    export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
    export AWS_DEFAULT_REGION=$S3_REGION
 
 
+   # Define a cleanup function
+   cleanup() {
+     echo "Cleaning up..."
+     rm -f dump.backup
+   }
+
+   # Set a trap to call the cleanup function when the script exits
+   trap cleanup EXIT
+
    # TODO: check if database is fresh
    echo "Snapshotting $POSTGRES_DB database"
    pg_dump -Fc $POSTGRES_HOST_OPTS $POSTGRES_DB > dump.backup
 
-   aws configure set default.s3.multipart_chunksize 16MB
 
    if [ "${PRIVATE_BACKUP}" == "true" ] || [ "${PRIVATE_BACKUP}" == "1" ]; then
      echo "Rotating old snapshot"
-     aws $AWS_ARGS s3 cp s3://$S3_BUCKET/$S3_PATH/$S3_FILENAME.backup s3://$S3_BUCKET/$S3_PATH/$S3_FILENAME.old.backup --acl private || true
+     gsutil cp gs://$S3_BUCKET/$S3_PATH/$S3_FILENAME.backup gs://$S3_BUCKET/$S3_PATH/$S3_FILENAME.old.backup || true
      echo "Uploading fresh private snapshot to $S3_BUCKET/$S3_PATH/$S3_FILENAME"
-     cat dump.backup | aws $AWS_ARGS s3 cp - s3://$S3_BUCKET/$S3_PATH/$S3_FILENAME.backup --acl private || exit 2
+     cat dump.backup | gsutil cp - gs://$S3_BUCKET/$S3_PATH/$S3_FILENAME.backup || exit 2
    else
      echo "Rotating old snapshot"
-     aws $AWS_ARGS s3 cp s3://$S3_BUCKET/$S3_PATH/$S3_FILENAME.backup s3://$S3_BUCKET/$S3_PATH/$S3_FILENAME.old.backup --acl public-read || true
+     gsutil cp -a public-read gs://$S3_BUCKET/$S3_PATH/$S3_FILENAME.backup gs://$S3_BUCKET/$S3_PATH/$S3_FILENAME.old.backup || true
      echo "Uploading fresh public snapshot to $S3_BUCKET/$S3_PATH/$S3_FILENAME"
-     cat dump.backup | aws $AWS_ARGS s3 cp - s3://$S3_BUCKET/$S3_PATH/$S3_FILENAME.backup --acl public-read || exit 2
+     cat dump.backup | gsutil cp -a public-read - gs://$S3_BUCKET/$S3_PATH/$S3_FILENAME.backup || exit 2
    fi
 
    echo "Snapshot uploaded successfully, removing local file"
diff --git a/boto.config b/boto.config
new file mode 100644
index 0000000..c68baf8
--- /dev/null
+++ b/boto.config
@@ -0,0 +1,13 @@
+[Credentials]
+gs_access_key_id=replace_gs_access_key_id
+gs_secret_access_key=replace_gs_secret_access_key
+
+[Boto]
+
+[GoogleCompute]
+
+[GSUtil]
+content_language = en
+default_api_version = 2
+
+[OAuth2]
\ No newline at end of file
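
Note on the credential plumbing in this patch: backup.sh now expects S3_SECRET_ACCESS_KEY to carry a base64-encoded Google service-account JSON key (it is decoded into /key.json) and S3_ACCESS_KEY_ID to carry the GCP project ID passed to gcloud. A minimal sketch of preparing those variables on the host before starting the container; the key file name and project ID below are hypothetical placeholders, not values from this repo:

    # Hypothetical host-side setup; substitute your own key file and project.
    export S3_ACCESS_KEY_ID="my-gcp-project"                 # consumed by: gcloud ... --project "$S3_ACCESS_KEY_ID"
    export S3_SECRET_ACCESS_KEY="$(base64 -w0 sa-key.json)"  # decoded in backup.sh via: base64 -d > /key.json
    # "-w0" (disable line wrapping) is GNU base64; adjust the flag on other platforms.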