diff --git a/backup-database.bash b/backup-database.bash
index 5348a744..306038be 100644
--- a/backup-database.bash
+++ b/backup-database.bash
@@ -12,15 +12,15 @@
 export AWS_RESPONSE_CHECKSUM_VALIDATION=when_required
 
 # IMPORTANT: This command requires the `ionos` profile, otherwise it will not work.
 # The profile can be created with `aws configure --profile ionos`.
 # The key for it can be found at https://dcd.ionos.com/latest/?lang=en#/key-management
-docker exec -t $DATABASE_NAME pg_dump --data-only -U main main | brotli --best > $FILE_NAME
+docker exec -t $DATABASE_NAME pg_dump --data-only -U main main | brotli --quality=3 > $FILE_NAME
 
 aws s3 cp $FILE_NAME s3://ibc-db-backup/ --profile ionos --endpoint-url https://s3.eu-central-3.ionoscloud.com --storage-class STANDARD
 echo "Uploaded $FILE_NAME"
 
-docker exec -t $DATABASE_NAME pg_dumpall -c -U main | brotli --best > $FILE_NAME_COMPLETE
+docker exec -t $DATABASE_NAME pg_dumpall -c -U main | brotli --quality=3 > $FILE_NAME_COMPLETE
 
-aws s3 cp $FILE_NAME_COMPLETE s3://ibc-db-backup/ --profile ionos --endpoint-url https://s3-eu-central-3.ionoscloud.com --storage-class STANDARD
+aws s3 cp $FILE_NAME_COMPLETE s3://ibc-db-backup/ --profile ionos --endpoint-url https://s3.eu-central-3.ionoscloud.com --storage-class STANDARD
 
 echo "Uploaded $FILE_NAME_COMPLETE"
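Note on the backup pipeline above (an observation, not part of this change): the exit status of `pg_dump | brotli` is brotli's, so a failed dump can still produce and upload a tiny, useless `.br` file. A minimal hardening sketch, assuming the same variable names the script already uses:

```bash
#!/bin/bash
# Sketch only: fail the whole script if any pipeline stage fails, so a broken
# pg_dump aborts the backup before the aws s3 cp upload runs.
set -euo pipefail

docker exec -t "$DATABASE_NAME" pg_dump --data-only -U main main \
  | brotli --quality=3 > "$FILE_NAME"
```

With `pipefail` a pipeline returns the last non-zero exit status of any stage, and `set -e` then stops the script at that point.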
diff --git a/docker-compose.yml b/docker-compose.yml
index a96bb452..1b0d1117 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,6 +1,9 @@
 version: '3'
 
 services:
   database:
+    container_name: database
+    image: postgres:17.5
+    build: ./
     restart: always
     env_file:
diff --git a/recover-db-dev.bash b/recover-db-dev.bash
index bc278caf..7b931b23 100644
--- a/recover-db-dev.bash
+++ b/recover-db-dev.bash
@@ -4,19 +4,35 @@
 BUCKET_NAME="ibc-db-backup"
 ENDPOINT_URL="https://s3.eu-central-3.ionoscloud.com"
 LOCAL_DOWNLOAD_DIR="./" # Where to save the file
+DATABASE_NAME=database
 
 # === Check if a custom file is given as a command line argument ===
 if [ $# -eq 1 ]; then
   CUSTOM_FILE="$1"
   echo "🔍 Using custom file: $CUSTOM_FILE"
 
-  # Check if the file exists
+  # Check if the file exists locally
   if [ ! -f "$CUSTOM_FILE" ]; then
-    echo "❌ Custom file does not exist: $CUSTOM_FILE"
-    exit 1
+    # Check if the file exists on the remote
+    if ! aws s3api head-object --bucket "$BUCKET_NAME" --key "$CUSTOM_FILE" --endpoint-url "$ENDPOINT_URL" > /dev/null 2>&1; then
+      echo "❌ Custom file does not exist in S3 bucket or locally."
+      exit 1
+    else
+      echo "📥 Downloading $CUSTOM_FILE from S3"
+      aws s3 cp "s3://$BUCKET_NAME/$CUSTOM_FILE" "$LOCAL_DOWNLOAD_DIR" \
+        --endpoint-url "$ENDPOINT_URL"
+    fi
   fi
+
   LATEST_FILE="$CUSTOM_FILE"
   FILENAME=$(basename "$LATEST_FILE")
-  SQL_FILE="${FILENAME%.br}" # Remove .br suffix
+  if [[ "$FILENAME" == *.br ]]; then
+    echo "🗜️ Detected compressed file: $FILENAME"
+    # Remove the .br suffix for the SQL file
+    SQL_FILE="${FILENAME%.br}"
+    brotli -d "$FILENAME"
+  else
+    SQL_FILE=$FILENAME
+  fi
 else
   echo "🔍 No custom file provided, searching for latest .sql.br file in S3"
@@ -36,20 +52,21 @@ else
   echo "🔍 Latest file found: $LATEST_FILE"
   FILENAME=$(basename "$LATEST_FILE")
   SQL_FILE="${FILENAME%.br}" # Remove .br suffix
-
   echo "📥 Downloading $LATEST_FILE"
   aws s3 cp "s3://$BUCKET_NAME/$LATEST_FILE" "$LOCAL_DOWNLOAD_DIR" \
     --endpoint-url "$ENDPOINT_URL"
+
+  brotli -d "$FILENAME"
+  echo "🗜️ Decompressed to $SQL_FILE"
 fi
 
-# === Decompress with Brotli ===
-echo "🗜️ Decompressing $FILENAME -> $SQL_FILE"
-brotli -d "$FILENAME"
 
 # === Import into Postgres inside Docker ===
-echo "🐘 Importing into PostgreSQL (online-energieausweis-database-1:main)"
-docker exec -i "online-energieausweis-database-1" env PGPASSWORD="hHMP8cd^N3SnzGRR" \
-  psql -U "main" -d "main" < "$SQL_FILE"
+echo "🐘 Importing into PostgreSQL ($DATABASE_NAME:main)"
+docker exec -i "$DATABASE_NAME" env PGPASSWORD="hHMP8cd^N3SnzGRR" \
+  psql -v ON_ERROR_STOP=0 -U main -d main < "$SQL_FILE"
+
 echo "✅ Import complete."
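A note on the decompress step (an assumption about the brotli CLI's default behavior, not something this diff touches): `brotli -d` keeps the `.br` input file but refuses to overwrite an existing output file, so re-running the script against a backup that was already decompressed aborts at that point. A small idempotency sketch using the script's own variables:

```bash
# Sketch only: decompress just once per download; adding -f would instead
# force an overwrite if a stale .sql file from an earlier run must be replaced.
if [ ! -f "$SQL_FILE" ]; then
  brotli -d "$FILENAME"
fi
```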
import("../src/pages/api/verbrauchsausweis-gewerbe/[id].ts"), diff --git a/src/pages/dashboard/objekte/index.astro b/src/pages/dashboard/objekte/index.astro index d1216236..fc96ff3c 100644 --- a/src/pages/dashboard/objekte/index.astro +++ b/src/pages/dashboard/objekte/index.astro @@ -8,6 +8,7 @@ const page = Number(Astro.url.searchParams.get("p")); const user = await getCurrentUser(Astro); + if (!user) { return Astro.redirect("/auth/login"); } @@ -23,7 +24,7 @@ const totalPageCount = await prisma.aufnahme.count({ : {}, }); -if (page < 1 || page > totalPageCount && totalPageCount > 0) { +if (page < 1 || page > totalPageCount) { return Astro.redirect("/dashboard/objekte?p=1"); } diff --git a/wipe-database.bash b/wipe-database.bash index 13d4c8a3..5ccc60f2 100644 --- a/wipe-database.bash +++ b/wipe-database.bash @@ -3,7 +3,7 @@ set -e # Config -CONTAINER_NAME="online-energieausweis-database-1" +CONTAINER_NAME="database" DB_USER="main" DB_NAME="main" TIMESTAMP=$(date +"%Y-%m-%d_%H-%M-%S") @@ -39,40 +39,16 @@ fi if [[ "$SKIP_BACKUP" == false ]]; then echo "📦 Backup wird erstellt..." - docker exec -t "$CONTAINER_NAME" pg_dumpall -c -U "$DB_USER" | brotli > "$FILE_NAME" + docker exec -t "$CONTAINER_NAME" pg_dumpall -c -U "$DB_USER" | brotli --quality=1 > "$FILE_NAME" echo "✅ Backup abgeschlossen: $FILE_NAME" fi echo "🧨 Alle Daten aus allen Tabellen werden gelöscht..." # Generate and run TRUNCATE statements for all tables in the public schema -docker exec -i "$CONTAINER_NAME" psql -U "$DB_USER" "$DB_NAME" <<'EOSQL' -DO $$ -DECLARE - r RECORD; - sql TEXT := ''; -BEGIN - -- Truncate all tables - FOR r IN - SELECT tablename - FROM pg_tables - WHERE schemaname = 'public' - LOOP - sql := sql || FORMAT('TRUNCATE TABLE public.%I CASCADE;', r.tablename); - END LOOP; - - -- Drop all sequences - FOR r IN - SELECT sequence_name - FROM information_schema.sequences - WHERE sequence_schema = 'public' - LOOP - sql := sql || FORMAT('DROP SEQUENCE IF EXISTS public.%I CASCADE;', r.sequence_name); - END LOOP; - - EXECUTE sql; -END -$$; +docker exec -i "$CONTAINER_NAME" psql -U "$DB_USER" "postgres" <<'EOSQL' +DROP DATABASE IF EXISTS main; +CREATE DATABASE main WITH OWNER main ENCODING 'UTF8'; EOSQL echo "✅ Alle Tabellen gelöscht und Schema zurückgesetzt."