
Commit 7eb533b

Compress dump files to reduce the size of the files
1 parent f4ed37c commit 7eb533b

File tree

images/backup-restore/start.sh
images/planet-dump/start.sh

2 files changed: +54 -26 lines changed

images/backup-restore/start.sh

Lines changed: 14 additions & 13 deletions
@@ -35,21 +35,22 @@ cloudStorageOps() {
 }
 
 backupDB() {
-  local LOCAL_BACKUP_FILE=${BACKUP_CLOUD_FILE}.dump
-  local CLOUD_BACKUP_FILE="${BACKUP_CLOUD_FOLDER}/${BACKUP_CLOUD_FILE}.dump"
-  if [ "$SET_DATE_AT_NAME" == "true" ]; then
-    local CURRENT_DATE=$(date '+%Y%m%d-%H%M')
-    LOCAL_BACKUP_FILE="${BACKUP_CLOUD_FILE}-${CURRENT_DATE}.dump"
-    CLOUD_BACKUP_FILE="${BACKUP_CLOUD_FOLDER}/${BACKUP_CLOUD_FILE}-${CURRENT_DATE}.dump"
-  fi
+  local LOCAL_BACKUP_FILE="${BACKUP_CLOUD_FILE}.dump"
+  local LOCAL_BACKUP_FILE_GZIP="${BACKUP_CLOUD_FILE}.dump.gz"
+  local CLOUD_BACKUP_FILE="${BACKUP_CLOUD_FOLDER}/${BACKUP_CLOUD_FILE}.dump.gz"
 
-  # Backup database with max compression
-  echo "Backing up DB ${POSTGRES_DB} into ${LOCAL_BACKUP_FILE}"
-  # pg_dump -h ${POSTGRES_HOST} -U ${POSTGRES_USER} ${POSTGRES_DB} | gzip -9 >${LOCAL_BACKUP_FILE}
-  pg_dump -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -Fc -f ${LOCAL_BACKUP_FILE} ${POSTGRES_DB}
+  if [ "$SET_DATE_AT_NAME" == "true" ]; then
+    local CURRENT_DATE
+    CURRENT_DATE=$(date '+%Y%m%d-%H%M')
+    LOCAL_BACKUP_FILE="${BACKUP_CLOUD_FILE}-${CURRENT_DATE}.dump"
+    LOCAL_BACKUP_FILE_GZIP="${BACKUP_CLOUD_FILE}-${CURRENT_DATE}.dump.gz"
+    CLOUD_BACKUP_FILE="${BACKUP_CLOUD_FOLDER}/${BACKUP_CLOUD_FILE}-${CURRENT_DATE}.dump.gz"
+  fi
 
-  # Handle cloud storage based on the provider
-  cloudStorageOps "${LOCAL_BACKUP_FILE}" "${CLOUD_BACKUP_FILE}"
+  # Backup database with pg_dump custom format (-Fc) + gzip
+  echo "Backing up DB ${POSTGRES_DB} into ${LOCAL_BACKUP_FILE_GZIP}"
+  pg_dump -h "${POSTGRES_HOST}" -U "${POSTGRES_USER}" -Fc "${POSTGRES_DB}" | gzip -9 > "${LOCAL_BACKUP_FILE}.gz"
+  cloudStorageOps "${LOCAL_BACKUP_FILE_GZIP}" "${CLOUD_BACKUP_FILE}"
 }
 
 restoreDB() {
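
For reference, restoring one of these compressed dumps is the mirror of the new pipeline: gunzip the archive and feed the custom-format dump to pg_restore. A minimal sketch, assuming the same POSTGRES_* variables that start.sh already uses:

    # Sketch only: restore a backup produced by the new backupDB pipeline.
    # Assumes the POSTGRES_HOST/POSTGRES_USER/POSTGRES_DB variables from start.sh.
    gunzip -c "${BACKUP_CLOUD_FILE}.dump.gz" |
      pg_restore -h "${POSTGRES_HOST}" -U "${POSTGRES_USER}" -d "${POSTGRES_DB}" --clean --if-exists

Since pg_restore can read a custom-format (-Fc) archive from stdin, the dump never needs to be written uncompressed to disk.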

images/planet-dump/start.sh

Lines changed: 40 additions & 13 deletions
@@ -23,23 +23,50 @@ if [ "$OVERWRITE_PLANET_FILE" == "true" ]; then
   cloud_planetPBFFile=planet/planet-latest.osm.pbf
 fi
 
+# ===============================
+# Download db .dump file
+# ===============================
 # ===============================
 # Download db .dump file
 # ===============================
 download_dump_file() {
-  echo "Downloading db .dump file from cloud..."
-  if [ "$CLOUDPROVIDER" == "aws" ]; then
-    if [[ "$DUMP_CLOUD_URL" == *.txt ]]; then
-      temp_txt="$VOLUME_DIR/tmp_dump_url.txt"
-      aws s3 cp "$DUMP_CLOUD_URL" "$temp_txt"
-      first_line=$(head -n 1 "$temp_txt")
-      aws s3 cp "$first_line" "$dumpFile"
-    else
-      aws s3 cp "$DUMP_CLOUD_URL" "$dumpFile"
-    fi
-  elif [ "$CLOUDPROVIDER" == "gcp" ]; then
-    gsutil cp "$DUMP_CLOUD_URL" "$dumpFile"
-  fi
+  echo "Downloading db .dump file from cloud..."
+  if [ "$CLOUDPROVIDER" == "aws" ]; then
+    if [[ "$DUMP_CLOUD_URL" == *.txt ]]; then
+      # Download the .txt file containing the URL
+      temp_txt="$VOLUME_DIR/tmp_dump_url.txt"
+      aws s3 cp "$DUMP_CLOUD_URL" "$temp_txt"
+
+      # Get the first line (S3 URL to the .dump or .dump.gz file)
+      first_line=$(head -n 1 "$temp_txt")
+      echo "Found dump URL in txt: $first_line"
+
+      aws s3 cp "$first_line" "$dumpFile"
+
+      # Check if it's compressed (.gz) and decompress
+      if [[ "$first_line" == *.gz ]]; then
+        echo "Decompressing gzip file..."
+        gunzip -f "$dumpFile"
+        dumpFile="${dumpFile%.gz}"
+      fi
+    else
+      aws s3 cp "$DUMP_CLOUD_URL" "$dumpFile"
+      # If it's compressed, decompress
+      if [[ "$DUMP_CLOUD_URL" == *.gz ]]; then
+        echo "Decompressing gzip file..."
+        gunzip -f "$dumpFile"
+        dumpFile="${dumpFile%.gz}"
+      fi
+    fi
+
+  elif [ "$CLOUDPROVIDER" == "gcp" ]; then
+    gsutil cp "$DUMP_CLOUD_URL" "$dumpFile"
+  else
+    echo "Unsupported CLOUDPROVIDER: $CLOUDPROVIDER"
+    exit 1
+  fi
+
+  echo "Dump file ready at: $dumpFile"
 }
 
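The *.txt branch above adds a level of indirection: DUMP_CLOUD_URL may point at a small manifest file whose first line is the S3 URL of the actual dump. A minimal usage sketch, with hypothetical bucket and object names:

    # Hypothetical manifest: s3://my-bucket/dumps/latest.txt contains one line,
    #   s3://my-bucket/dumps/osm-20240101-0300.dump.gz
    export CLOUDPROVIDER=aws
    export DUMP_CLOUD_URL=s3://my-bucket/dumps/latest.txt
    download_dump_file
    # The function copies the referenced object to $dumpFile and, because the
    # URL ends in .gz, gunzips it in place and strips the .gz suffix.

Repointing the planet-dump job at a newer dump then only requires rewriting that one-line manifest.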