feat(db): add database backup and initialization system
- Add backup.sh script for manual PostgreSQL database backups - Update .gitignore to exclude pg_backups directory - Modify docker-compose.yml to include backup service and volume mounts - Add init.sh script for automated database initialization with backup restoration
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -32,6 +32,9 @@ yarn-error.log*
|
|||||||
pnpm-debug.log*
|
pnpm-debug.log*
|
||||||
lerna-debug.log*
|
lerna-debug.log*
|
||||||
|
|
||||||
|
#backups
|
||||||
|
pg_backups/
|
||||||
|
|
||||||
# Editor directories and files
|
# Editor directories and files
|
||||||
.vscode/*
|
.vscode/*
|
||||||
!.vscode/extensions.json
|
!.vscode/extensions.json
|
||||||
|
|||||||
16
backup.sh
Normal file
16
backup.sh
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
#!/bin/bash
# backup.sh — take a one-off SQL dump of the worklenz PostgreSQL container
# into ./pg_backups/<db>_<timestamp>.sql.
#
# Usage: ./backup.sh
# Env overrides (defaults preserved): CONTAINER, DB_NAME, DB_USER, BACKUP_DIR
set -euo pipefail

# Adjust these as needed:
CONTAINER=${CONTAINER:-worklenz_db}
DB_NAME=${DB_NAME:-worklenz_db}
DB_USER=${DB_USER:-postgres}
BACKUP_DIR=${BACKUP_DIR:-./pg_backups}

mkdir -p "$BACKUP_DIR"

timestamp=$(date +%Y-%m-%d_%H-%M-%S)
outfile="${BACKUP_DIR}/${DB_NAME}_${timestamp}.sql"
echo "Creating backup $outfile ..."

# NOTE: no -t on docker exec — a pseudo-TTY rewrites LF to CRLF in the
# captured stream and corrupts the SQL dump.
# Dump to a temp file first so a failed pg_dump never leaves a truncated
# .sql behind that init.sh could later try to restore from.
tmpfile="${outfile}.part"
trap 'rm -f -- "$tmpfile"' EXIT
docker exec "$CONTAINER" pg_dump -U "$DB_USER" -d "$DB_NAME" > "$tmpfile"
mv -- "$tmpfile" "$outfile"
trap - EXIT

echo "Backup saved to $outfile"
|
||||||
@@ -80,7 +80,11 @@ services:
|
|||||||
POSTGRES_DB: ${DB_NAME:-worklenz_db}
|
POSTGRES_DB: ${DB_NAME:-worklenz_db}
|
||||||
POSTGRES_PASSWORD: ${DB_PASSWORD:-password}
|
POSTGRES_PASSWORD: ${DB_PASSWORD:-password}
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: [ "CMD-SHELL", "pg_isready -d ${DB_NAME:-worklenz_db} -U ${DB_USER:-postgres}" ]
|
test:
|
||||||
|
[
|
||||||
|
"CMD-SHELL",
|
||||||
|
"pg_isready -d ${DB_NAME:-worklenz_db} -U ${DB_USER:-postgres}",
|
||||||
|
]
|
||||||
interval: 10s
|
interval: 10s
|
||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 5
|
retries: 5
|
||||||
@@ -89,9 +93,20 @@ services:
|
|||||||
volumes:
|
volumes:
|
||||||
- worklenz_postgres_data:/var/lib/postgresql/data
|
- worklenz_postgres_data:/var/lib/postgresql/data
|
||||||
- type: bind
|
- type: bind
|
||||||
source: ./worklenz-backend/database
|
source: ./worklenz-backend/database/sql
|
||||||
target: /docker-entrypoint-initdb.d
|
target: /docker-entrypoint-initdb.d/sql
|
||||||
consistency: cached
|
consistency: cached
|
||||||
|
- type: bind
|
||||||
|
source: ./worklenz-backend/database/migrations
|
||||||
|
target: /docker-entrypoint-initdb.d/migrations
|
||||||
|
consistency: cached
|
||||||
|
- type: bind
|
||||||
|
source: ./worklenz-backend/database/init.sh
|
||||||
|
target: /docker-entrypoint-initdb.d/00_init.sh
|
||||||
|
consistency: cached
|
||||||
|
- type: bind
|
||||||
|
source: ./pg_backups
|
||||||
|
target: /docker-entrypoint-initdb.d/pg_backups
|
||||||
command: >
|
command: >
|
||||||
bash -c ' if command -v apt-get >/dev/null 2>&1; then
|
bash -c ' if command -v apt-get >/dev/null 2>&1; then
|
||||||
apt-get update && apt-get install -y dos2unix
|
apt-get update && apt-get install -y dos2unix
|
||||||
@@ -101,11 +116,34 @@ services:
|
|||||||
dos2unix "{}" 2>/dev/null || true
|
dos2unix "{}" 2>/dev/null || true
|
||||||
chmod +x "{}"
|
chmod +x "{}"
|
||||||
'\'' \; && exec docker-entrypoint.sh postgres '
|
'\'' \; && exec docker-entrypoint.sh postgres '
|
||||||
|
db-backup:
|
||||||
|
image: postgres:15
|
||||||
|
container_name: worklenz_db_backup
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: ${DB_USER:-postgres}
|
||||||
|
POSTGRES_DB: ${DB_NAME:-worklenz_db}
|
||||||
|
POSTGRES_PASSWORD: ${DB_PASSWORD:-password}
|
||||||
|
depends_on:
|
||||||
|
db:
|
||||||
|
condition: service_healthy
|
||||||
|
volumes:
|
||||||
|
- ./pg_backups:/pg_backups # host directory for backup files
|
||||||
|
# set up a bash loop to back up the database every 24h
|
||||||
|
command: >
|
||||||
|
bash -c "while true; do
|
||||||
|
sleep 86400;
|
||||||
|
PGPASSWORD=$$POSTGRES_PASSWORD pg_dump -h worklenz_db -U $$POSTGRES_USER -d $$POSTGRES_DB \
|
||||||
|
> /pg_backups/worklenz_db_$(date +%Y-%m-%d_%H-%M-%S).sql;
|
||||||
|
find /pg_backups -type f -name '*.sql' -mtime +30 -delete;
|
||||||
|
done"
|
||||||
|
restart: unless-stopped
|
||||||
|
networks:
|
||||||
|
- worklenz
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
worklenz_postgres_data:
|
worklenz_postgres_data:
|
||||||
worklenz_minio_data:
|
worklenz_minio_data:
|
||||||
|
pgdata:
|
||||||
|
|
||||||
networks:
|
networks:
|
||||||
worklenz:
|
worklenz:
|
||||||
|
|||||||
88
worklenz-backend/database/init.sh
Normal file
88
worklenz-backend/database/init.sh
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
#!/bin/bash
# init.sh — PostgreSQL first-boot initializer (runs from
# /docker-entrypoint-initdb.d). Restores the newest backup if one exists;
# otherwise applies the base schema and then any unapplied migrations,
# tracking them in schema_migrations.
set -euo pipefail

echo "Starting database initialization..."

SQL_DIR="/docker-entrypoint-initdb.d/sql"
MIGRATIONS_DIR="/docker-entrypoint-initdb.d/migrations"
BACKUP_DIR="/docker-entrypoint-initdb.d/pg_backups"

# --------------------------------------------
# 🗄️ STEP 1: Attempt to restore latest backup
# --------------------------------------------

# Pick the most recently modified *.sql file without parsing `ls` output
# (safe for paths containing spaces).
LATEST_BACKUP=""
if [ -d "$BACKUP_DIR" ]; then
  LATEST_BACKUP=$(find "$BACKUP_DIR" -maxdepth 1 -type f -name '*.sql' \
    -printf '%T@ %p\n' 2>/dev/null | sort -rn | head -n 1 | cut -d' ' -f2-)
fi

if [ -n "$LATEST_BACKUP" ] && [ -f "$LATEST_BACKUP" ]; then
  echo "🗄️ Found latest backup: $LATEST_BACKUP"
  echo "⏳ Restoring from backup..."
  # ON_ERROR_STOP makes psql exit non-zero on the first SQL error, so a
  # truncated/corrupt dump aborts initialization loudly instead of being
  # reported as a successful restore.
  psql -v ON_ERROR_STOP=1 -U "$POSTGRES_USER" -d "$POSTGRES_DB" < "$LATEST_BACKUP"
  echo "✅ Backup restoration complete. Skipping schema and migrations."
  exit 0
else
  echo "ℹ️ No valid backup found. Proceeding with base schema and migrations."
fi

# --------------------------------------------
# 🏗️ STEP 2: Continue with base schema setup
# --------------------------------------------

# Create migrations table if it doesn't exist
psql -v ON_ERROR_STOP=1 -U "$POSTGRES_USER" -d "$POSTGRES_DB" -c "
  CREATE TABLE IF NOT EXISTS schema_migrations (
    version TEXT PRIMARY KEY,
    applied_at TIMESTAMP DEFAULT now()
  );
"

# Base schema files, executed in dependency order (tables before functions,
# functions before triggers, etc.).
BASE_SQL_FILES=(
  "0_extensions.sql"
  "1_tables.sql"
  "indexes.sql"
  "4_functions.sql"
  "triggers.sql"
  "3_views.sql"
  "2_dml.sql"
  "5_database_user.sql"
)

echo "Running base schema SQL files in order..."

for file in "${BASE_SQL_FILES[@]}"; do
  full_path="$SQL_DIR/$file"
  if [ -f "$full_path" ]; then
    echo "Executing $file..."
    psql -v ON_ERROR_STOP=1 -U "$POSTGRES_USER" -d "$POSTGRES_DB" -f "$full_path"
  else
    echo "WARNING: $file not found, skipping." >&2
  fi
done

echo "✅ Base schema SQL execution complete."

# --------------------------------------------
# 🚀 STEP 3: Apply SQL migrations
# --------------------------------------------

if [ -d "$MIGRATIONS_DIR" ] && compgen -G "$MIGRATIONS_DIR/*.sql" > /dev/null; then
  echo "Applying migrations..."
  for f in "$MIGRATIONS_DIR"/*.sql; do
    version=$(basename "$f")
    if ! psql -U "$POSTGRES_USER" -d "$POSTGRES_DB" -tAc \
        "SELECT 1 FROM schema_migrations WHERE version = '$version'" | grep -q 1; then
      echo "Applying migration: $version"
      # ON_ERROR_STOP: a migration that fails must NOT be recorded as
      # applied — without it psql -f returns 0 even when statements fail.
      psql -v ON_ERROR_STOP=1 -U "$POSTGRES_USER" -d "$POSTGRES_DB" -f "$f"
      psql -v ON_ERROR_STOP=1 -U "$POSTGRES_USER" -d "$POSTGRES_DB" \
        -c "INSERT INTO schema_migrations (version) VALUES ('$version');"
    else
      echo "Skipping already applied migration: $version"
    fi
  done
else
  echo "No migration files found or directory is empty, skipping migrations."
fi

echo "🎉 Database initialization completed successfully."
|
||||||
Reference in New Issue
Block a user