Initial commit: BC backup project
This commit is contained in:
bc-backup.sh — new executable file, 237 lines added (@@ -0,0 +1,237 @@)
#!/bin/bash
#
# Business Central SaaS Automated Backup Script
# Downloads BC database export, encrypts, and uploads to S3 with immutability
#

set -euo pipefail

# Resolve the directory this script lives in so every path below is
# independent of the caller's working directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CONFIG_FILE="${SCRIPT_DIR}/bc-backup.conf"
LOG_DIR="${SCRIPT_DIR}/logs"
WORK_DIR="${SCRIPT_DIR}/temp"

# Create the log and scratch directories up front (no-op when they exist).
mkdir -p "${LOG_DIR}" "${WORK_DIR}"
# Append a timestamped message to the run log and echo it to stdout.
log() {
    local stamp
    stamp="$(date '+%Y-%m-%d %H:%M:%S')"
    echo "[${stamp}] $*" | tee -a "${LOG_DIR}/backup.log"
}
# Append a timestamped ERROR message to the run log and echo it to stderr.
log_error() {
    local stamp
    stamp="$(date '+%Y-%m-%d %H:%M:%S')"
    echo "[${stamp}] ERROR: $*" | tee -a "${LOG_DIR}/backup.log" >&2
}
# --- Configuration ---------------------------------------------------------
# The config file supplies credentials and target settings; abort early with
# a clear message when it is absent.
if [[ ! -f "${CONFIG_FILE}" ]]; then
    log_error "Configuration file not found: ${CONFIG_FILE}"
    exit 1
fi

source "${CONFIG_FILE}"

# Mandatory settings — fail fast on the first one that is missing or empty.
required_vars=(
    "AZURE_TENANT_ID"
    "AZURE_CLIENT_ID"
    "AZURE_CLIENT_SECRET"
    "BC_ENVIRONMENT_NAME"
    "ENCRYPTION_PASSPHRASE"
    "S3_BUCKET"
    "S3_ENDPOINT"
    "AWS_ACCESS_KEY_ID"
    "AWS_SECRET_ACCESS_KEY"
)

for required in "${required_vars[@]}"; do
    # ${!required:-} is indirect expansion: the value of the variable whose
    # name is stored in $required, defaulting to empty when unset.
    [[ -n "${!required:-}" ]] || {
        log_error "Required configuration variable not set: ${required}"
        exit 1
    }
done
# Set defaults
# Applied only when the sourced config file did not set a value.
RETENTION_DAYS="${RETENTION_DAYS:-30}"
S3_TOOL="${S3_TOOL:-awscli}"
# NOTE(review): MAX_RETRIES is not referenced anywhere else in this script —
# confirm whether retry logic was intended or this setting is dead.
MAX_RETRIES="${MAX_RETRIES:-3}"
CLEANUP_LOCAL="${CLEANUP_LOCAL:-true}"
BC_API_VERSION="${BC_API_VERSION:-v2.15}"

log "========================================="
log "Starting Business Central backup process"
log "========================================="
log "Environment: ${BC_ENVIRONMENT_NAME}"
log "S3 Bucket: ${S3_BUCKET}"
log "Retention: ${RETENTION_DAYS} days"

# Generate timestamp for backup filename
# Every artifact of this run (BACPAC, encrypted file, S3 key) shares this
# timestamped, environment-scoped base name.
TIMESTAMP=$(date '+%Y%m%d_%H%M%S')
BACKUP_FILENAME="bc_backup_${BC_ENVIRONMENT_NAME}_${TIMESTAMP}"
# --- Step 1: Export database via BC Admin Center API -----------------------
log "Step 1: Initiating database export via BC Admin Center API"

# The PowerShell helper reads its parameters from the environment.
export AZURE_TENANT_ID
export AZURE_CLIENT_ID
export AZURE_CLIENT_SECRET
export BC_ENVIRONMENT_NAME
export BC_API_VERSION
export WORK_DIR

# Fix: the AWS CLI invoked in Steps 3 and 4 is a child process that reads
# credentials from the environment. Variables set by `source`-ing the config
# file are shell-local unless exported, so the Azure exports above worked but
# the AWS credentials silently did not reach the aws CLI. Export them too.
export AWS_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY

BACPAC_FILE="${WORK_DIR}/${BACKUP_FILENAME}.bacpac"

if ! pwsh -File "${SCRIPT_DIR}/bc-export.ps1" -OutputPath "${BACPAC_FILE}"; then
    log_error "Database export failed"
    exit 1
fi

# Defensive check: the helper may exit 0 without actually producing a file.
if [[ ! -f "${BACPAC_FILE}" ]]; then
    log_error "BACPAC file not found after export: ${BACPAC_FILE}"
    exit 1
fi

BACPAC_SIZE=$(du -h "${BACPAC_FILE}" | cut -f1)
log "Database export completed successfully (${BACPAC_SIZE})"
# --- Step 2: Encrypt the backup with symmetric GPG -------------------------
log "Step 2: Encrypting backup file with GPG"

ENCRYPTED_FILE="${BACPAC_FILE}.gpg"

# The passphrase is fed over stdin (fd 0) rather than the command line, so it
# does not appear in `ps` output. Compression is disabled — presumably the
# BACPAC is already compressed (TODO confirm).
gpg_args=(
    --batch
    --yes
    --passphrase-fd 0
    --symmetric
    --cipher-algo AES256
    --compress-algo none
    --output "${ENCRYPTED_FILE}"
)

if ! echo "${ENCRYPTION_PASSPHRASE}" | gpg "${gpg_args[@]}" "${BACPAC_FILE}"; then
    log_error "Encryption failed"
    exit 1
fi

ENCRYPTED_SIZE=$(du -h "${ENCRYPTED_FILE}" | cut -f1)
log "Encryption completed successfully (${ENCRYPTED_SIZE})"

# Do not leave the plaintext export on disk once the encrypted copy exists.
if [[ "${CLEANUP_LOCAL}" == "true" ]]; then
    rm -f "${BACPAC_FILE}"
    log "Removed unencrypted BACPAC file"
fi
# --- Step 3: Upload to S3 with object lock ---------------------------------
log "Step 3: Uploading encrypted backup to S3"

S3_KEY="backups/${BACKUP_FILENAME}.bacpac.gpg"
S3_URI="s3://${S3_BUCKET}/${S3_KEY}"

# Calculate retention date. BSD (macOS) and GNU (Linux) `date` take different
# flags for relative offsets, hence the branch.
if [[ "$OSTYPE" == "darwin"* ]]; then
    # macOS date command
    RETENTION_DATE=$(date -u -v+${RETENTION_DAYS}d '+%Y-%m-%dT%H:%M:%S')
else
    # Linux date command
    RETENTION_DATE=$(date -u -d "+${RETENTION_DAYS} days" '+%Y-%m-%dT%H:%M:%S')
fi

upload_success=false

if [[ "${S3_TOOL}" == "awscli" ]]; then
    log "Using AWS CLI for upload"

    # Upload with object lock retention. COMPLIANCE mode prevents deletion or
    # overwrite of the object version by any user until the retain-until date
    # passes; the appended "Z" marks the timestamp as UTC.
    if aws s3api put-object \
        --bucket "${S3_BUCKET}" \
        --key "${S3_KEY}" \
        --body "${ENCRYPTED_FILE}" \
        --endpoint-url "${S3_ENDPOINT}" \
        --object-lock-mode COMPLIANCE \
        --object-lock-retain-until-date "${RETENTION_DATE}Z" \
        --metadata "backup-timestamp=${TIMESTAMP},environment=${BC_ENVIRONMENT_NAME},encrypted=true"; then
        upload_success=true
    fi

elif [[ "${S3_TOOL}" == "s3cmd" ]]; then
    log "Using s3cmd for upload"

    # Upload file first. ${S3_ENDPOINT#*://} strips the URL scheme because
    # s3cmd expects a bare hostname for --host/--host-bucket.
    if s3cmd put \
        --host="${S3_ENDPOINT#*://}" \
        --host-bucket="${S3_ENDPOINT#*://}" \
        "${ENCRYPTED_FILE}" \
        "${S3_URI}"; then

        log "File uploaded, attempting to set object lock retention"
        # Note: s3cmd may not support object lock natively
        # Fallback to aws cli for setting retention if available
        if command -v aws &> /dev/null; then
            aws s3api put-object-retention \
                --bucket "${S3_BUCKET}" \
                --key "${S3_KEY}" \
                --endpoint-url "${S3_ENDPOINT}" \
                --retention Mode=COMPLIANCE,RetainUntilDate="${RETENTION_DATE}Z" || \
                log_error "Warning: Could not set object lock retention via AWS CLI"
        else
            log_error "Warning: s3cmd doesn't support object lock. Install aws-cli for full functionality"
        fi
        # NOTE(review): upload_success is set even when retention could not
        # be applied — the backup exists but may not be immutable. Confirm
        # this best-effort behavior is the intended trade-off.
        upload_success=true
    fi
else
    log_error "Invalid S3_TOOL: ${S3_TOOL}. Must be 'awscli' or 's3cmd'"
    exit 1
fi

if [[ "${upload_success}" == "true" ]]; then
    log "Upload completed successfully: ${S3_URI}"
    log "Object lock retention until: ${RETENTION_DATE}Z"
else
    log_error "Upload failed"
    exit 1
fi
# --- Step 4: Verify upload -------------------------------------------------
log "Step 4: Verifying upload"

if [[ "${S3_TOOL}" == "awscli" ]]; then
    # head-object succeeds only when the uploaded object actually exists.
    if aws s3api head-object \
        --bucket "${S3_BUCKET}" \
        --key "${S3_KEY}" \
        --endpoint-url "${S3_ENDPOINT}" > /dev/null 2>&1; then
        log "Upload verification successful"
    else
        log_error "Upload verification failed"
        exit 1
    fi
elif [[ "${S3_TOOL}" == "s3cmd" ]]; then
    # Fix: also pass --host-bucket, matching the upload invocation in Step 3.
    # Without it s3cmd falls back to its configured (AWS-style) bucket host
    # template, so verification could fail against a custom S3 endpoint even
    # though the upload succeeded.
    if s3cmd info "${S3_URI}" \
        --host="${S3_ENDPOINT#*://}" \
        --host-bucket="${S3_ENDPOINT#*://}" > /dev/null 2>&1; then
        log "Upload verification successful"
    else
        log_error "Upload verification failed"
        exit 1
    fi
fi
# --- Step 5: Cleanup -------------------------------------------------------
if [[ "${CLEANUP_LOCAL}" == "true" ]]; then
    log "Step 5: Cleaning up local files"
    rm -f "${ENCRYPTED_FILE}"
    log "Local encrypted file removed"
else
    log "Step 5: Skipping cleanup (CLEANUP_LOCAL=false)"
    log "Encrypted backup retained at: ${ENCRYPTED_FILE}"
fi

log "========================================="
log "Backup completed successfully"
log "Backup file: ${S3_KEY}"
log "Size: ${ENCRYPTED_SIZE}"
log "========================================="

# Log rotation — keep last 30 days of logs.
# Fix: nothing in this script ever created backup.log.* files, so the
# age-based prune below could never match and backup.log grew without bound.
# Rotate the active log once it exceeds ~10 MB so the prune has rotated
# files to delete.
if [[ -f "${LOG_DIR}/backup.log" ]] && \
   [[ $(wc -c < "${LOG_DIR}/backup.log") -gt 10485760 ]]; then
    mv "${LOG_DIR}/backup.log" "${LOG_DIR}/backup.log.${TIMESTAMP}"
fi
find "${LOG_DIR}" -name "backup.log.*" -mtime +30 -delete 2>/dev/null || true

exit 0
Reference in New Issue
Block a user