feat: switch from Admin Center database export to BC API v2.0 data extraction

The Admin Center export API requires an Azure Storage SAS URI, which
in turn requires an Azure Subscription — defeating the purpose of an
independent backup. Instead, use BC API v2.0 to extract critical
business data (customers, vendors, items, GL entries, invoices, etc.) as JSON files.

- bc-export.ps1: rewritten to use BC API v2.0 endpoints, extracts 23
  entity types per company with OData pagination support
- bc-backup.sh: handles JSON export directory, creates tar.gz archive
  before encrypting and uploading to S3
- bc-backup.conf.template: removed Azure Storage SAS config, added
  optional BC_COMPANY_NAME filter
- decrypt-backup.sh: updated for tar.gz.gpg format, shows extracted
  entity files and metadata after decryption

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-10 07:33:32 +01:00
parent 96237787da
commit 77f48f326b
4 changed files with 260 additions and 354 deletions

View File

@@ -1,7 +1,7 @@
#!/bin/bash
#
# Business Central SaaS Automated Backup Script
# Downloads BC database export, encrypts, and uploads to S3 with immutability
# Extracts BC data via API, encrypts, and uploads to S3 with immutability
#
set -euo pipefail
@@ -39,7 +39,6 @@ required_vars=(
"AZURE_CLIENT_ID"
"AZURE_CLIENT_SECRET"
"BC_ENVIRONMENT_NAME"
"AZURE_STORAGE_SAS_URI"
"ENCRYPTION_PASSPHRASE"
"S3_BUCKET"
"S3_ENDPOINT"
@@ -59,7 +58,6 @@ RETENTION_DAYS="${RETENTION_DAYS:-30}"
S3_TOOL="${S3_TOOL:-awscli}"
MAX_RETRIES="${MAX_RETRIES:-3}"
CLEANUP_LOCAL="${CLEANUP_LOCAL:-true}"
BC_API_VERSION="${BC_API_VERSION:-v2.0}"
log "========================================="
log "Starting Business Central backup process"
@@ -72,37 +70,47 @@ log "Retention: ${RETENTION_DAYS} days"
TIMESTAMP=$(date '+%Y%m%d_%H%M%S')
BACKUP_FILENAME="bc_backup_${BC_ENVIRONMENT_NAME}_${TIMESTAMP}"
# Step 1: Export database using PowerShell script
log "Step 1: Initiating database export via BC Admin Center API"
# Step 1: Extract data using PowerShell script (BC API v2.0)
log "Step 1: Extracting data via BC API v2.0"
export AZURE_TENANT_ID
export AZURE_CLIENT_ID
export AZURE_CLIENT_SECRET
export BC_ENVIRONMENT_NAME
export BC_API_VERSION
export AZURE_STORAGE_SAS_URI
export AZURE_STORAGE_CONTAINER
export BC_COMPANY_NAME="${BC_COMPANY_NAME:-}"
export WORK_DIR
BACPAC_FILE="${WORK_DIR}/${BACKUP_FILENAME}.bacpac"
EXPORT_DIR="${WORK_DIR}/${BACKUP_FILENAME}"
if ! pwsh -File "${SCRIPT_DIR}/bc-export.ps1" -OutputPath "${BACPAC_FILE}"; then
log_error "Database export failed"
if ! pwsh -File "${SCRIPT_DIR}/bc-export.ps1" -OutputPath "${EXPORT_DIR}"; then
log_error "Data export failed"
exit 1
fi
if [[ ! -f "${BACPAC_FILE}" ]]; then
log_error "BACPAC file not found after export: ${BACPAC_FILE}"
if [[ ! -d "${EXPORT_DIR}" ]]; then
log_error "Export directory not found: ${EXPORT_DIR}"
exit 1
fi
BACPAC_SIZE=$(du -h "${BACPAC_FILE}" | cut -f1)
log "Database export completed successfully (${BACPAC_SIZE})"
# Create tar.gz archive from the export directory
ARCHIVE_FILE="${WORK_DIR}/${BACKUP_FILENAME}.tar.gz"
log "Creating archive: ${ARCHIVE_FILE}"
tar -czf "${ARCHIVE_FILE}" -C "${WORK_DIR}" "${BACKUP_FILENAME}"
ARCHIVE_SIZE=$(du -h "${ARCHIVE_FILE}" | cut -f1)
log "Data export completed successfully (${ARCHIVE_SIZE})"
# Remove export directory
if [[ "${CLEANUP_LOCAL}" == "true" ]]; then
rm -rf "${EXPORT_DIR}"
log "Removed export directory"
fi
# Step 2: Encrypt the backup
log "Step 2: Encrypting backup file with GPG"
ENCRYPTED_FILE="${BACPAC_FILE}.gpg"
ENCRYPTED_FILE="${ARCHIVE_FILE}.gpg"
if ! echo "${ENCRYPTION_PASSPHRASE}" | gpg \
--batch \
@@ -112,7 +120,7 @@ if ! echo "${ENCRYPTION_PASSPHRASE}" | gpg \
--cipher-algo AES256 \
--compress-algo none \
--output "${ENCRYPTED_FILE}" \
"${BACPAC_FILE}"; then
"${ARCHIVE_FILE}"; then
log_error "Encryption failed"
exit 1
fi
@@ -120,16 +128,16 @@ fi
ENCRYPTED_SIZE=$(du -h "${ENCRYPTED_FILE}" | cut -f1)
log "Encryption completed successfully (${ENCRYPTED_SIZE})"
# Remove unencrypted BACPAC
# Remove unencrypted archive
if [[ "${CLEANUP_LOCAL}" == "true" ]]; then
rm -f "${BACPAC_FILE}"
log "Removed unencrypted BACPAC file"
rm -f "${ARCHIVE_FILE}"
log "Removed unencrypted archive"
fi
# Step 3: Upload to S3 with object lock
log "Step 3: Uploading encrypted backup to S3"
S3_KEY="backups/${BACKUP_FILENAME}.bacpac.gpg"
S3_KEY="backups/${BACKUP_FILENAME}.tar.gz.gpg"
S3_URI="s3://${S3_BUCKET}/${S3_KEY}"
# Calculate retention date
@@ -154,7 +162,7 @@ if [[ "${S3_TOOL}" == "awscli" ]]; then
--endpoint-url "${S3_ENDPOINT}" \
--object-lock-mode COMPLIANCE \
--object-lock-retain-until-date "${RETENTION_DATE}Z" \
--metadata "backup-timestamp=${TIMESTAMP},environment=${BC_ENVIRONMENT_NAME},encrypted=true"; then
--metadata "backup-timestamp=${TIMESTAMP},environment=${BC_ENVIRONMENT_NAME},encrypted=true,type=api-extract"; then
upload_success=true
fi