#!/bin/bash

# Restore the application database from a Brotli-compressed SQL dump.
# With no arguments, the newest "data-dump" object is fetched from the IONOS S3
# bucket; alternatively, pass a local .sql.br file as the first argument.

# === Configuration ===
BUCKET_NAME="ibc-db-backup"
ENDPOINT_URL="https://s3.eu-central-3.ionoscloud.com"
LOCAL_DOWNLOAD_DIR="./"   # Where to save the file
CUSTOM_FILE=""            # Set when a local dump is passed as an argument

# === Check if a custom file is given as a command line argument ===
if [ $# -eq 1 ]; then
  CUSTOM_FILE="$1"
  echo "🔍 Using custom file: $CUSTOM_FILE"

  # Check if the file exists
  if [ ! -f "$CUSTOM_FILE" ]; then
    echo "❌ Custom file does not exist: $CUSTOM_FILE"
    exit 1
  fi

  # Keep the full path so decompression and import work from any directory
  BR_FILE="$CUSTOM_FILE"
  SQL_FILE="${BR_FILE%.br}"   # Remove .br suffix
else
  echo "🔍 No custom file provided, searching for latest .sql.br file in S3"

  # === Get latest file from IONOS S3 bucket ===
  LATEST_FILE=$(aws s3api list-objects-v2 \
    --bucket "$BUCKET_NAME" \
    --prefix "data-dump" \
    --endpoint-url "$ENDPOINT_URL" \
    --query 'Contents | sort_by(@, &LastModified) | [-1].Key' \
    --output text)

  # === Check if a file was found ===
  if [ "$LATEST_FILE" == "None" ] || [ -z "$LATEST_FILE" ]; then
    echo "❌ No matching .sql.br file found."
    exit 1
  fi

  echo "🔍 Latest file found: $LATEST_FILE"
  BR_FILE="$LOCAL_DOWNLOAD_DIR$(basename "$LATEST_FILE")"
  SQL_FILE="${BR_FILE%.br}"   # Remove .br suffix

  echo "📥 Downloading $LATEST_FILE"
  aws s3 cp "s3://$BUCKET_NAME/$LATEST_FILE" "$LOCAL_DOWNLOAD_DIR" \
    --endpoint-url "$ENDPOINT_URL"
fi

# === Decompress with Brotli ===
echo "🗜️ Decompressing $BR_FILE -> $SQL_FILE"
brotli -d "$BR_FILE"

# === Import into Postgres inside Docker ===
echo "🐘 Importing into PostgreSQL (online-energieausweis-database-1:main)"
docker exec -i "online-energieausweis-database-1" env PGPASSWORD="hHMP8cd^N3SnzGRR" \
  psql -U "main" -d "main" < "$SQL_FILE"

echo "✅ Import complete."

# === Optional: Clean up ===
# If a custom file was provided, do not delete it
if [ -z "$CUSTOM_FILE" ]; then
  echo "🧹 Cleaning up downloaded files..."
  rm "$BR_FILE" "$SQL_FILE"
fi
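
# A minimal post-import sanity check (a sketch, not part of the restore flow above;
# it reuses the container, role, and password configured in this script and runs the
# standard psql "\dt" meta-command to confirm the imported tables are present):
#   docker exec -i online-energieausweis-database-1 \
#     env PGPASSWORD="hHMP8cd^N3SnzGRR" psql -U main -d main -c '\dt'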