Fix?

@@ -12,15 +12,15 @@ export AWS_RESPONSE_CHECKSUM_VALIDATION=when_required
 # IMPORTANT: This command requires the `ionos` profile, otherwise it will not work.
 # The profile can be created with `aws configure --profile ionos`.
 # The key for it can be found at https://dcd.ionos.com/latest/?lang=en#/key-management
-docker exec -t $DATABASE_NAME pg_dump --data-only -U main main | brotli --best > $FILE_NAME
+docker exec -t $DATABASE_NAME pg_dump --data-only -U main main | brotli --quality=3 > $FILE_NAME
 
 aws s3 cp $FILE_NAME s3://ibc-db-backup/ --profile ionos --endpoint-url https://s3.eu-central-3.ionoscloud.com --storage-class STANDARD
 
 echo "Uploaded $FILE_NAME"
 
-docker exec -t $DATABASE_NAME pg_dumpall -c -U main | brotli --best > $FILE_NAME_COMPLETE
+docker exec -t $DATABASE_NAME pg_dumpall -c -U main | brotli --quality=3 > $FILE_NAME_COMPLETE
 
-aws s3 cp $FILE_NAME_COMPLETE s3://ibc-db-backup/ --profile ionos --endpoint-url https://s3-eu-central-3.ionoscloud.com --storage-class STANDARD
+aws s3 cp $FILE_NAME_COMPLETE s3://ibc-db-backup/ --profile ionos --endpoint-url https://s3.eu-central-3.ionoscloud.com --storage-class STANDARD
 
 echo "Uploaded $FILE_NAME_COMPLETE"
 
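The drop from `--best` to `--quality=3` trades compression ratio for speed: `--best` is the brotli CLI's alias for the maximum quality 11 (also its default), which gets very slow on large dumps. A quick way to compare the two on an existing dump (illustrative only: `dump.sql` is a placeholder, and the numbers depend entirely on the data):

# Compare compression level against wall time on a sample dump.
time brotli --quality=3 -c dump.sql > q3.sql.br
time brotli --best -c dump.sql > q11.sql.br
ls -l q3.sql.br q11.sql.br
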
@@ -1,6 +1,9 @@
 version: '3'
 services:
   database:
+    container_name: database
+    image: postgres:17.5
+
     build: ./
     restart: always
     env_file:
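Pinning `image: postgres:17.5` next to `container_name: database` is what lets the scripts below address the container by the stable name `database` instead of the generated `online-energieausweis-database-1`. Note that when both `build:` and `image:` are present, Compose builds from `./` and tags the result as `postgres:17.5` rather than pulling the upstream image. A quick sanity check after `docker compose up -d` (illustrative):

# Confirm the fixed container name and pinned image the scripts rely on.
docker inspect --format '{{.Config.Image}}' database
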
@@ -4,19 +4,35 @@
 BUCKET_NAME="ibc-db-backup"
 ENDPOINT_URL="https://s3.eu-central-3.ionoscloud.com"
 LOCAL_DOWNLOAD_DIR="./" # Where to save the file
+DATABASE_NAME=database
 
 # === Check if a custom file is given as a command line argument ===
 if [ $# -eq 1 ]; then
     CUSTOM_FILE="$1"
     echo "🔍 Using custom file: $CUSTOM_FILE"
-    # Check if the file exists
+    # Check if file exists locally
     if [ ! -f "$CUSTOM_FILE" ]; then
-        echo "❌ Custom file does not exist: $CUSTOM_FILE"
+        # Check if the file exists on the remote
+        if ! aws s3api head-object --bucket "$BUCKET_NAME" --key "$CUSTOM_FILE" --endpoint-url "$ENDPOINT_URL" > /dev/null 2>&1; then
+            echo "❌ Custom file does not exist in S3 bucket or locally."
             exit 1
+        else
+            echo "📥 Downloading $CUSTOM_FILE from S3"
+            aws s3 cp "s3://$BUCKET_NAME/$CUSTOM_FILE" "$LOCAL_DOWNLOAD_DIR" \
+                --endpoint-url "$ENDPOINT_URL"
         fi
+    fi
+
     LATEST_FILE="$CUSTOM_FILE"
     FILENAME=$(basename "$LATEST_FILE")
+    if [[ "$FILENAME" == *.br ]]; then
+        echo "🗜️ Detected compressed file: $FILENAME"
+        # Remove the .br suffix for the SQL file
         SQL_FILE="${FILENAME%.br}" # Remove .br suffix
+        brotli -d "$FILENAME"
+    else
+        SQL_FILE=$FILENAME
+    fi
 else
     echo "🔍 No custom file provided, searching for latest .sql.br file in S3"
 
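The new fallback probes the bucket with `aws s3api head-object`, which exits non-zero when the key is absent; note it also exits non-zero on permission errors, so a 403 reads as "missing" here. The same pattern as a standalone sketch, reusing the script's bucket and endpoint values:

#!/usr/bin/env bash
# Probe an S3 key without downloading it; head-object exits non-zero
# when the object does not exist (or is not readable).
BUCKET_NAME="ibc-db-backup"
ENDPOINT_URL="https://s3.eu-central-3.ionoscloud.com"
KEY="$1"
if aws s3api head-object --bucket "$BUCKET_NAME" --key "$KEY" \
    --endpoint-url "$ENDPOINT_URL" > /dev/null 2>&1; then
    echo "exists: s3://$BUCKET_NAME/$KEY"
else
    echo "missing or unreadable: s3://$BUCKET_NAME/$KEY"
fi
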
@@ -36,20 +52,21 @@ else
     echo "🔍 Latest file found: $LATEST_FILE"
     FILENAME=$(basename "$LATEST_FILE")
     SQL_FILE="${FILENAME%.br}" # Remove .br suffix
 
     echo "📥 Downloading $LATEST_FILE"
     aws s3 cp "s3://$BUCKET_NAME/$LATEST_FILE" "$LOCAL_DOWNLOAD_DIR" \
         --endpoint-url "$ENDPOINT_URL"
 
+    brotli -d "$FILENAME"
+    echo "🗜️ Decompressed to $SQL_FILE"
 fi
 
-# === Decompress with Brotli ===
-echo "🗜️ Decompressing $FILENAME -> $SQL_FILE"
-brotli -d "$FILENAME"
-
 # === Import into Postgres inside Docker ===
-echo "🐘 Importing into PostgreSQL (online-energieausweis-database-1:main)"
-docker exec -i "online-energieausweis-database-1" env PGPASSWORD="hHMP8cd^N3SnzGRR" \
-    psql -U "main" -d "main" < "$SQL_FILE"
+echo "🐘 Importing into PostgreSQL ($DATABASE_NAME:main)"
+docker exec -i "$DATABASE_NAME" env PGPASSWORD="hHMP8cd^N3SnzGRR" \
+    psql -v ON_ERROR_STOP=0 -U main -d main < "$SQL_FILE"
 
+
+
 echo "✅ Import complete."
+
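`-v ON_ERROR_STOP=0` spells out psql's default behaviour: keep executing after individual statement errors, which suits replaying a dump where some early DROP statements can legitimately fail. If a fail-fast import is ever wanted instead, the inverse is a one-flag change (illustrative; assumes the password arrives via `$PGPASSWORD` rather than being hardcoded):

# Abort on the first SQL error and propagate psql's exit status.
docker exec -i "$DATABASE_NAME" env PGPASSWORD="$PGPASSWORD" \
    psql -v ON_ERROR_STOP=1 -U main -d main < "$SQL_FILE" \
    || { echo "❌ Import failed"; exit 1; }
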
@@ -17,20 +17,20 @@ export const createCaller = createCallerFactory({
   "auth/access-token": await import("../src/pages/api/auth/access-token.ts"),
   "auth/passwort-vergessen": await import("../src/pages/api/auth/passwort-vergessen.ts"),
   "auth/refresh-token": await import("../src/pages/api/auth/refresh-token.ts"),
-  "bedarfsausweis-wohnen/[id]": await import("../src/pages/api/bedarfsausweis-wohnen/[id].ts"),
-  "bedarfsausweis-wohnen": await import("../src/pages/api/bedarfsausweis-wohnen/index.ts"),
   "bedarfsausweis-gewerbe/[id]": await import("../src/pages/api/bedarfsausweis-gewerbe/[id].ts"),
   "bedarfsausweis-gewerbe": await import("../src/pages/api/bedarfsausweis-gewerbe/index.ts"),
+  "bedarfsausweis-wohnen/[id]": await import("../src/pages/api/bedarfsausweis-wohnen/[id].ts"),
+  "bedarfsausweis-wohnen": await import("../src/pages/api/bedarfsausweis-wohnen/index.ts"),
   "bilder/[id]": await import("../src/pages/api/bilder/[id].ts"),
   "geg-nachweis-gewerbe/[id]": await import("../src/pages/api/geg-nachweis-gewerbe/[id].ts"),
   "geg-nachweis-gewerbe": await import("../src/pages/api/geg-nachweis-gewerbe/index.ts"),
   "geg-nachweis-wohnen/[id]": await import("../src/pages/api/geg-nachweis-wohnen/[id].ts"),
   "geg-nachweis-wohnen": await import("../src/pages/api/geg-nachweis-wohnen/index.ts"),
   "objekt": await import("../src/pages/api/objekt/index.ts"),
-  "ticket": await import("../src/pages/api/ticket/index.ts"),
   "rechnung/[id]": await import("../src/pages/api/rechnung/[id].ts"),
   "rechnung/anfordern": await import("../src/pages/api/rechnung/anfordern.ts"),
   "rechnung": await import("../src/pages/api/rechnung/index.ts"),
+  "ticket": await import("../src/pages/api/ticket/index.ts"),
   "user": await import("../src/pages/api/user/index.ts"),
   "user/self": await import("../src/pages/api/user/self.ts"),
   "verbrauchsausweis-gewerbe/[id]": await import("../src/pages/api/verbrauchsausweis-gewerbe/[id].ts"),

@@ -8,6 +8,7 @@ const page = Number(Astro.url.searchParams.get("p"));
 
 const user = await getCurrentUser(Astro);
 
+
 if (!user) {
   return Astro.redirect("/auth/login");
 }
@@ -23,7 +24,7 @@ const totalPageCount = await prisma.aufnahme.count({
     : {},
 });
 
-if (page < 1 || page > totalPageCount && totalPageCount > 0) {
+if (page < 1 || page > totalPageCount) {
   return Astro.redirect("/dashboard/objekte?p=1");
 }
 
@@ -3,7 +3,7 @@
 set -e
 
 # Config
-CONTAINER_NAME="online-energieausweis-database-1"
+CONTAINER_NAME="database"
 DB_USER="main"
 DB_NAME="main"
 TIMESTAMP=$(date +"%Y-%m-%d_%H-%M-%S")
@@ -39,40 +39,16 @@ fi
 
 if [[ "$SKIP_BACKUP" == false ]]; then
     echo "📦 Creating backup..."
-    docker exec -t "$CONTAINER_NAME" pg_dumpall -c -U "$DB_USER" | brotli > "$FILE_NAME"
+    docker exec -t "$CONTAINER_NAME" pg_dumpall -c -U "$DB_USER" | brotli --quality=1 > "$FILE_NAME"
     echo "✅ Backup complete: $FILE_NAME"
 fi
 
 echo "🧨 Deleting all data from all tables..."
 
 # Generate and run TRUNCATE statements for all tables in the public schema
-docker exec -i "$CONTAINER_NAME" psql -U "$DB_USER" "$DB_NAME" <<'EOSQL'
-DO $$
-DECLARE
-    r RECORD;
-    sql TEXT := '';
-BEGIN
-    -- Truncate all tables
-    FOR r IN
-        SELECT tablename
-        FROM pg_tables
-        WHERE schemaname = 'public'
-    LOOP
-        sql := sql || FORMAT('TRUNCATE TABLE public.%I CASCADE;', r.tablename);
-    END LOOP;
-
-    -- Drop all sequences
-    FOR r IN
-        SELECT sequence_name
-        FROM information_schema.sequences
-        WHERE sequence_schema = 'public'
-    LOOP
-        sql := sql || FORMAT('DROP SEQUENCE IF EXISTS public.%I CASCADE;', r.sequence_name);
-    END LOOP;
-
-    EXECUTE sql;
-END
-$$;
+docker exec -i "$CONTAINER_NAME" psql -U "$DB_USER" "postgres" <<'EOSQL'
+DROP DATABASE IF EXISTS main;
+CREATE DATABASE main WITH OWNER main ENCODING 'UTF8';
 EOSQL
 
 echo "✅ All tables deleted and schema reset."

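Targeting `postgres` instead of `$DB_NAME` in the heredoc is what makes the new approach possible: PostgreSQL refuses to drop the database the current session is connected to, so the DROP/CREATE has to be issued from the maintenance database. `DROP DATABASE` still fails while other clients hold connections; since the compose file pins `postgres:17.5`, the `WITH (FORCE)` option (PostgreSQL 13+) could terminate them first. An illustrative variant:

# Recreate the database even if other sessions are still connected.
docker exec -i "$CONTAINER_NAME" psql -U "$DB_USER" postgres <<'EOSQL'
DROP DATABASE IF EXISTS main WITH (FORCE);
CREATE DATABASE main WITH OWNER main ENCODING 'UTF8';
EOSQL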