The config file sets AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY as plain shell variables, but the AWS CLI reads credentials from environment variables. Added explicit export statements in both bc-backup.sh and bc-cleanup.sh. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
338 lines
9.5 KiB
Bash
Executable File
338 lines
9.5 KiB
Bash
Executable File
#!/bin/bash
#
# Business Central SaaS Automated Backup Script
# Extracts BC data via API, encrypts, and uploads to S3 with immutability
# Supports full and incremental (delta) backup modes
#
# Usage: bc-backup.sh [--mode full|incremental]
#

# Strict mode: -e exit on command failure, -u error on unset variables,
# pipefail makes a pipeline fail when any stage fails.
set -euo pipefail
|
|
|
|
# Resolve every path relative to the script's own directory so the script
# behaves the same regardless of the caller's working directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CONFIG_FILE="${SCRIPT_DIR}/bc-backup.conf"      # sourced shell-variable config
LOG_DIR="${SCRIPT_DIR}/logs"                    # backup.log lives here
WORK_DIR="${SCRIPT_DIR}/temp"                   # staging area for export/archive
STATE_FILE="${SCRIPT_DIR}/last-run-state.json"  # last successful run timestamp
LOCK_FILE="${SCRIPT_DIR}/.backup.lock"          # holds the PID of the running backup

# Ensure working directories exist before anything logs or stages files
mkdir -p "${LOG_DIR}"
mkdir -p "${WORK_DIR}"
|
|
|
|
# Logging function
|
|
# Timestamped info logger: writes to stdout and appends to backup.log.
log() {
    local stamp
    stamp=$(date '+%Y-%m-%d %H:%M:%S')
    printf '[%s] %s\n' "${stamp}" "$*" | tee -a "${LOG_DIR}/backup.log"
}
|
|
|
|
# Timestamped error logger: appends to backup.log and emits on stderr
# (the trailing >&2 redirects tee's stdout copy to stderr).
log_error() {
    local stamp
    stamp=$(date '+%Y-%m-%d %H:%M:%S')
    printf '[%s] ERROR: %s\n' "${stamp}" "$*" | tee -a "${LOG_DIR}/backup.log" >&2
}
|
|
|
|
# Lock file management - prevent overlapping runs. Installed as the EXIT
# trap so the lock is removed on every exit path.
cleanup() {
    rm -f -- "${LOCK_FILE}"
}
|
|
|
|
# Refuse to start while a live backup holds the lock.
if [[ -f "${LOCK_FILE}" ]]; then
    lock_pid=$(cat "${LOCK_FILE}" 2>/dev/null || true)
    # kill -0 only probes process existence; NOTE(review): it also reports
    # "not running" for PIDs owned by other users — fine as long as all
    # backups run under the same account.
    if [[ -n "${lock_pid}" ]] && kill -0 "${lock_pid}" 2>/dev/null; then
        log "Another backup is already running (PID ${lock_pid}), exiting"
        exit 0
    else
        log "Stale lock file found (PID ${lock_pid} not running), removing"
        rm -f "${LOCK_FILE}"
    fi
fi

# Create the lock with noclobber (set -C) so two processes that both passed
# the check above cannot both succeed. The original plain redirect left a
# race window between the existence check and the write.
if ! (set -C; echo $$ > "${LOCK_FILE}") 2>/dev/null; then
    log "Another backup grabbed the lock first, exiting"
    exit 0
fi
trap cleanup EXIT
|
|
|
|
# Parse command-line arguments. Only --mode <full|incremental> is accepted;
# an empty BACKUP_MODE means "use the config default" (resolved later).
BACKUP_MODE=""
while [[ $# -gt 0 ]]; do
    case "$1" in
        --mode)
            # Guard against a dangling --mode: without this, "$2" aborts
            # under set -u with an opaque "unbound variable" error.
            if [[ $# -lt 2 ]]; then
                log_error "--mode requires a value ('full' or 'incremental')"
                exit 1
            fi
            BACKUP_MODE="$2"
            shift 2
            ;;
        *)
            log_error "Unknown argument: $1"
            exit 1
            ;;
    esac
done
|
|
|
|
# Load configuration (plain shell variable assignments, sourced below)
if [[ ! -f "${CONFIG_FILE}" ]]; then
    log_error "Configuration file not found: ${CONFIG_FILE}"
    exit 1
fi

# NOTE(review): sourcing executes arbitrary shell from the config file and
# the file holds secrets — it should be owned by the backup user, mode 600.
source "${CONFIG_FILE}"

# CLI --mode wins; otherwise fall back to the config default, then 'incremental'
if [[ -z "${BACKUP_MODE}" ]]; then
    BACKUP_MODE="${BACKUP_MODE_DEFAULT:-incremental}"
fi

# Reject anything other than the two supported modes
if [[ "${BACKUP_MODE}" != "full" && "${BACKUP_MODE}" != "incremental" ]]; then
    log_error "Invalid backup mode: ${BACKUP_MODE}. Must be 'full' or 'incremental'"
    exit 1
fi
|
|
|
|
# Variables the script cannot run without; all are expected to come from
# bc-backup.conf sourced above.
required_vars=(
    "AZURE_TENANT_ID"
    "AZURE_CLIENT_ID"
    "AZURE_CLIENT_SECRET"
    "BC_ENVIRONMENT_NAME"
    "ENCRYPTION_PASSPHRASE"
    "S3_BUCKET"
    "S3_ENDPOINT"
    "AWS_ACCESS_KEY_ID"
    "AWS_SECRET_ACCESS_KEY"
)

# ${!name} is bash indirect expansion: the value of the variable whose name
# is stored in $name. Unset and empty are both treated as missing.
for required_name in "${required_vars[@]}"; do
    if [[ -z "${!required_name:-}" ]]; then
        log_error "Required configuration variable not set: ${required_name}"
        exit 1
    fi
done
|
|
|
|
# Optional settings fall back to sensible defaults when the config file
# leaves them unset (${VAR:=default} assigns in place, safe under set -u).
: "${RETENTION_DAYS:=30}"     # days of S3 object-lock retention
: "${S3_TOOL:=awscli}"        # 'awscli' or 's3cmd'
: "${MAX_RETRIES:=3}"         # NOTE(review): not referenced elsewhere in this file
: "${CLEANUP_LOCAL:=true}"    # delete local artifacts after upload

# The AWS CLI reads credentials from the environment, so the values sourced
# from the config file must be exported explicitly.
export AWS_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY
export AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION:-us-east-1}"
|
|
|
|
# Determine SinceDateTime for incremental mode: read the timestamp of the
# last successful run from the state file written at the end of that run.
SINCE_DATETIME=""
if [[ "${BACKUP_MODE}" == "incremental" ]]; then
    if [[ -f "${STATE_FILE}" ]]; then
        # Extract lastSuccessfulRun via python3; any failure (bad JSON,
        # missing key, no python3) is swallowed and leaves it empty.
        SINCE_DATETIME=$(python3 -c "import json,sys; print(json.load(open(sys.argv[1]))['lastSuccessfulRun'])" "${STATE_FILE}" 2>/dev/null || true)
    fi
    # Without a reference point an incremental backup is meaningless —
    # promote this run to a full backup instead.
    if [[ -z "${SINCE_DATETIME}" ]]; then
        log "No previous run state found, falling back to full backup"
        BACKUP_MODE="full"
    fi
fi
|
|
|
|
log "========================================="
log "Starting Business Central backup process"
log "========================================="
log "Mode: ${BACKUP_MODE}"
log "Environment: ${BC_ENVIRONMENT_NAME}"
log "S3 Bucket: ${S3_BUCKET}"
log "Retention: ${RETENTION_DAYS} days"
if [[ -n "${SINCE_DATETIME}" ]]; then
    log "Changes since: ${SINCE_DATETIME}"
fi

# Record the run start time (UTC) BEFORE the export begins: changes made
# while the export runs will be picked up again by the next incremental
# run instead of being skipped.
RUN_START_TIME=$(date -u '+%Y-%m-%dT%H:%M:%SZ')

# Local-time timestamp used only to make backup file names unique
TIMESTAMP=$(date '+%Y%m%d_%H%M%S')
BACKUP_FILENAME="bc_backup_${BC_ENVIRONMENT_NAME}_${TIMESTAMP}_${BACKUP_MODE}"
|
|
|
|
# Step 1: Extract data using PowerShell script (BC API v2.0)
log "Step 1: Extracting data via BC API v2.0 (${BACKUP_MODE})"

# The PowerShell exporter receives its credentials and target via the
# environment rather than on its command line.
export AZURE_TENANT_ID
export AZURE_CLIENT_ID
export AZURE_CLIENT_SECRET
export BC_ENVIRONMENT_NAME
export BC_COMPANY_NAME="${BC_COMPANY_NAME:-}"   # optional; empty means all/default
export WORK_DIR

EXPORT_DIR="${WORK_DIR}/${BACKUP_FILENAME}"

# Build the pwsh argument list as an array so paths with spaces stay intact
PWSH_ARGS=(-File "${SCRIPT_DIR}/bc-export.ps1" -OutputPath "${EXPORT_DIR}")
if [[ -n "${SINCE_DATETIME}" ]]; then
    PWSH_ARGS+=(-SinceDateTime "${SINCE_DATETIME}")
fi
|
|
|
|
# Run the exporter, capturing its exit code without tripping set -e
pwsh_exit=0
pwsh "${PWSH_ARGS[@]}" || pwsh_exit=$?

if [[ ${pwsh_exit} -eq 2 ]]; then
    # Exit code 2 = success but no records changed (exporter's contract)
    log "No changes detected since ${SINCE_DATETIME}, skipping backup"
    # Clean up empty export dir
    rm -rf "${EXPORT_DIR}" 2>/dev/null || true
    exit 0
elif [[ ${pwsh_exit} -ne 0 ]]; then
    # Any other non-zero code is a hard failure; remove partial output
    log_error "Data export failed (exit code ${pwsh_exit})"
    rm -rf "${EXPORT_DIR}" 2>/dev/null || true
    exit 1
fi
|
|
|
|
# Defensive check: the exporter exited 0, so its output directory must exist
if [[ ! -d "${EXPORT_DIR}" ]]; then
    log_error "Export directory not found: ${EXPORT_DIR}"
    exit 1
fi

# Create tar.gz archive from the export directory; -C keeps the archived
# paths relative to WORK_DIR instead of embedding absolute paths.
ARCHIVE_FILE="${WORK_DIR}/${BACKUP_FILENAME}.tar.gz"
log "Creating archive: ${ARCHIVE_FILE}"

tar -czf "${ARCHIVE_FILE}" -C "${WORK_DIR}" "${BACKUP_FILENAME}"

ARCHIVE_SIZE=$(du -h "${ARCHIVE_FILE}" | cut -f1)
log "Data export completed successfully (${ARCHIVE_SIZE})"

# Remove the raw export directory once it is captured in the archive
if [[ "${CLEANUP_LOCAL}" == "true" ]]; then
    rm -rf "${EXPORT_DIR}"
    log "Removed export directory"
fi
|
|
|
|
# Step 2: Encrypt the backup with a symmetric GPG passphrase (AES256).
log "Step 2: Encrypting backup file with GPG"

ENCRYPTED_FILE="${ARCHIVE_FILE}.gpg"

# Feed the passphrase on fd 0. printf replaces the original echo: echo
# would mangle a passphrase that begins with "-n"/"-e" or contains
# backslashes. Compression is disabled because the input is already gzip.
# NOTE(review): GnuPG >= 2.1 may also need --pinentry-mode loopback for
# --passphrase-fd to work non-interactively — confirm on the target host.
if ! printf '%s\n' "${ENCRYPTION_PASSPHRASE}" | gpg \
    --batch \
    --yes \
    --passphrase-fd 0 \
    --symmetric \
    --cipher-algo AES256 \
    --compress-algo none \
    --output "${ENCRYPTED_FILE}" \
    "${ARCHIVE_FILE}"; then
    log_error "Encryption failed"
    exit 1
fi

ENCRYPTED_SIZE=$(du -h "${ENCRYPTED_FILE}" | cut -f1)
log "Encryption completed successfully (${ENCRYPTED_SIZE})"

# Remove the unencrypted archive so plaintext data does not linger on disk
if [[ "${CLEANUP_LOCAL}" == "true" ]]; then
    rm -f "${ARCHIVE_FILE}"
    log "Removed unencrypted archive"
fi
|
|
|
|
# Step 3: Upload the encrypted backup to S3 with object lock
log "Step 3: Uploading encrypted backup to S3"

S3_KEY="backups/${BACKUP_MODE}/${BACKUP_FILENAME}.tar.gz.gpg"
S3_URI="s3://${S3_BUCKET}/${S3_KEY}"

# Retain-until timestamp: BSD date (macOS) and GNU date spell relative
# offsets differently, so branch on the platform.
case "$OSTYPE" in
    darwin*)
        RETENTION_DATE=$(date -u -v+"${RETENTION_DAYS}"d '+%Y-%m-%dT%H:%M:%S')
        ;;
    *)
        RETENTION_DATE=$(date -u -d "+${RETENTION_DAYS} days" '+%Y-%m-%dT%H:%M:%S')
        ;;
esac
|
|
|
|
upload_success=false

if [[ "${S3_TOOL}" == "awscli" ]]; then
    log "Using AWS CLI for upload"

    # put-object with the object-lock flags makes the backup immutable:
    # COMPLIANCE mode means the retention period cannot be shortened.
    if aws s3api put-object \
        --bucket "${S3_BUCKET}" \
        --key "${S3_KEY}" \
        --body "${ENCRYPTED_FILE}" \
        --endpoint-url "${S3_ENDPOINT}" \
        --object-lock-mode COMPLIANCE \
        --object-lock-retain-until-date "${RETENTION_DATE}Z" \
        --metadata "backup-timestamp=${TIMESTAMP},environment=${BC_ENVIRONMENT_NAME},encrypted=true,type=api-extract,mode=${BACKUP_MODE}"; then
        upload_success=true
    fi

elif [[ "${S3_TOOL}" == "s3cmd" ]]; then
    log "Using s3cmd for upload"

    # ${S3_ENDPOINT#*://} strips the URL scheme — s3cmd wants a bare host
    if s3cmd put \
        --host="${S3_ENDPOINT#*://}" \
        --host-bucket="${S3_ENDPOINT#*://}" \
        "${ENCRYPTED_FILE}" \
        "${S3_URI}"; then

        # s3cmd cannot set object lock itself; retrofit the retention with
        # the AWS CLI when available, otherwise continue with a warning
        # (the backup is uploaded but NOT immutable in that case).
        log "File uploaded, attempting to set object lock retention"
        if command -v aws &> /dev/null; then
            aws s3api put-object-retention \
                --bucket "${S3_BUCKET}" \
                --key "${S3_KEY}" \
                --endpoint-url "${S3_ENDPOINT}" \
                --retention Mode=COMPLIANCE,RetainUntilDate="${RETENTION_DATE}Z" || \
                log_error "Warning: Could not set object lock retention via AWS CLI"
        else
            log_error "Warning: s3cmd doesn't support object lock. Install aws-cli for full functionality"
        fi
        upload_success=true
    fi
else
    log_error "Invalid S3_TOOL: ${S3_TOOL}. Must be 'awscli' or 's3cmd'"
    exit 1
fi
|
|
|
|
# Abort the run unless the upload step above reported success.
if [[ "${upload_success}" != "true" ]]; then
    log_error "Upload failed"
    exit 1
fi
log "Upload completed successfully: ${S3_URI}"
log "Object lock retention until: ${RETENTION_DATE}Z"
|
|
|
|
# Step 4: Verify upload
|
|
log "Step 4: Verifying upload"
|
|
|
|
if [[ "${S3_TOOL}" == "awscli" ]]; then
|
|
if aws s3api head-object \
|
|
--bucket "${S3_BUCKET}" \
|
|
--key "${S3_KEY}" \
|
|
--endpoint-url "${S3_ENDPOINT}" > /dev/null 2>&1; then
|
|
log "Upload verification successful"
|
|
else
|
|
log_error "Upload verification failed"
|
|
exit 1
|
|
fi
|
|
elif [[ "${S3_TOOL}" == "s3cmd" ]]; then
|
|
if s3cmd info "${S3_URI}" --host="${S3_ENDPOINT#*://}" > /dev/null 2>&1; then
|
|
log "Upload verification successful"
|
|
else
|
|
log_error "Upload verification failed"
|
|
exit 1
|
|
fi
|
|
fi
|
|
|
|
# Step 5: Persist run state — reached only after a verified upload, so the
# next incremental run resumes from this run's start timestamp.
log "Step 5: Updating run state"
printf '{"lastSuccessfulRun": "%s", "lastMode": "%s", "lastFile": "%s"}\n' \
    "${RUN_START_TIME}" "${BACKUP_MODE}" "${S3_KEY}" > "${STATE_FILE}"
log "State saved: lastSuccessfulRun=${RUN_START_TIME}"
|
|
|
|
# Step 6: Cleanup of local artifacts
if [[ "${CLEANUP_LOCAL}" == "true" ]]; then
    log "Step 6: Cleaning up local files"
    rm -f "${ENCRYPTED_FILE}"
    log "Local encrypted file removed"
else
    log "Step 6: Skipping cleanup (CLEANUP_LOCAL=false)"
    log "Encrypted backup retained at: ${ENCRYPTED_FILE}"
fi

# Log rotation - keep last 30 days of logs
# NOTE(review): nothing in this script ever creates backup.log.* files —
# it only appends to backup.log, which therefore grows without bound.
# Either rotate backup.log here or manage it with logrotate.
find "${LOG_DIR}" -name "backup.log.*" -mtime +30 -delete 2>/dev/null || true

log "========================================="
log "Backup completed successfully"
log "Mode: ${BACKUP_MODE}"
log "Backup file: ${S3_KEY}"
log "Size: ${ENCRYPTED_SIZE}"
log "========================================="

exit 0
|