feat: switch from Admin Center database export to BC API v2.0 data extraction

The Admin Center export API requires an Azure Storage SAS URI, which in
turn requires an Azure subscription — defeating the purpose of an
independent backup. Instead, use BC API v2.0 to extract critical business
data (customers, vendors, items, GL entries, invoices, etc.) as JSON files.

- bc-export.ps1: rewritten to use BC API v2.0 endpoints, extracts 24
  entity types per company with OData pagination support
- bc-backup.sh: handles JSON export directory, creates tar.gz archive
  before encrypting and uploading to S3
- bc-backup.conf.template: removed Azure Storage SAS config, added
  optional BC_COMPANY_NAME filter
- decrypt-backup.sh: updated for tar.gz.gpg format, shows extracted
  entity files and metadata after decryption

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-10 07:33:32 +01:00
parent 96237787da
commit 77f48f326b
4 changed files with 260 additions and 354 deletions

View File

@@ -29,7 +29,7 @@ AZURE_CLIENT_SECRET=""
# 1. Go to API permissions > Add a permission
# 2. Select "Dynamics 365 Business Central"
# 3. Select "Application permissions"
# 4. Check "Automation.ReadWrite.All" or "API.ReadWrite.All"
# 4. Check "API.ReadWrite.All"
# 5. Click "Grant admin consent" (requires Global Admin)
# ===================================
@@ -40,28 +40,9 @@ AZURE_CLIENT_SECRET=""
# Find this in BC Admin Center: https://businesscentral.dynamics.com/
BC_ENVIRONMENT_NAME=""
# BC Admin API version (default: v2.21)
BC_API_VERSION="v2.21"
# ===================================
# Azure Storage Configuration
# ===================================
# The BC Admin Center API exports the database to your Azure Storage account.
# You need an Azure Storage account with a SAS URI that has Read, Write, Create, Delete permissions.
#
# To create a SAS URI:
# 1. Go to Azure Portal > Storage Accounts > your account
# 2. Go to "Shared access signature"
# 3. Enable: Blob service, Container+Object resource types, Read+Write+Create+Delete permissions
# 4. Set an appropriate expiry date
# 5. Copy the generated SAS URL
# Azure Storage Account SAS URI (full URI with SAS token)
# Example: https://youraccount.blob.core.windows.net?sv=2021-06-08&ss=b&srt=sco&sp=rwdlac&se=...&sig=...
AZURE_STORAGE_SAS_URI=""
# Azure Storage container name for exports (will be created automatically)
AZURE_STORAGE_CONTAINER="bc-exports"
# Optional: Limit export to a specific company name
# Leave empty to export all companies in the environment
BC_COMPANY_NAME=""
# ===================================
# Encryption Configuration
@@ -128,9 +109,6 @@ CLEANUP_LOCAL="true"
# Advanced Configuration
# ===================================
# Maximum time to wait for BC export completion (minutes)
# MAX_EXPORT_WAIT_MINUTES="120"
# Local temporary directory (default: ./temp)
# WORK_DIR="/var/tmp/bc-backup"

View File

@@ -1,7 +1,7 @@
#!/bin/bash
#
# Business Central SaaS Automated Backup Script
# Downloads BC database export, encrypts, and uploads to S3 with immutability
# Extracts BC data via API, encrypts, and uploads to S3 with immutability
#
set -euo pipefail
@@ -39,7 +39,6 @@ required_vars=(
"AZURE_CLIENT_ID"
"AZURE_CLIENT_SECRET"
"BC_ENVIRONMENT_NAME"
"AZURE_STORAGE_SAS_URI"
"ENCRYPTION_PASSPHRASE"
"S3_BUCKET"
"S3_ENDPOINT"
@@ -59,7 +58,6 @@ RETENTION_DAYS="${RETENTION_DAYS:-30}"
S3_TOOL="${S3_TOOL:-awscli}"
MAX_RETRIES="${MAX_RETRIES:-3}"
CLEANUP_LOCAL="${CLEANUP_LOCAL:-true}"
BC_API_VERSION="${BC_API_VERSION:-v2.21}"
log "========================================="
log "Starting Business Central backup process"
@@ -72,37 +70,47 @@ log "Retention: ${RETENTION_DAYS} days"
TIMESTAMP=$(date '+%Y%m%d_%H%M%S')
BACKUP_FILENAME="bc_backup_${BC_ENVIRONMENT_NAME}_${TIMESTAMP}"
# Step 1: Export database using PowerShell script
log "Step 1: Initiating database export via BC Admin Center API"
# Step 1: Extract data using PowerShell script (BC API v2.0)
log "Step 1: Extracting data via BC API v2.0"
export AZURE_TENANT_ID
export AZURE_CLIENT_ID
export AZURE_CLIENT_SECRET
export BC_ENVIRONMENT_NAME
export BC_API_VERSION
export AZURE_STORAGE_SAS_URI
export AZURE_STORAGE_CONTAINER
export BC_COMPANY_NAME="${BC_COMPANY_NAME:-}"
export WORK_DIR
BACPAC_FILE="${WORK_DIR}/${BACKUP_FILENAME}.bacpac"
EXPORT_DIR="${WORK_DIR}/${BACKUP_FILENAME}"
if ! pwsh -File "${SCRIPT_DIR}/bc-export.ps1" -OutputPath "${BACPAC_FILE}"; then
log_error "Database export failed"
if ! pwsh -File "${SCRIPT_DIR}/bc-export.ps1" -OutputPath "${EXPORT_DIR}"; then
log_error "Data export failed"
exit 1
fi
if [[ ! -f "${BACPAC_FILE}" ]]; then
log_error "BACPAC file not found after export: ${BACPAC_FILE}"
if [[ ! -d "${EXPORT_DIR}" ]]; then
log_error "Export directory not found: ${EXPORT_DIR}"
exit 1
fi
BACPAC_SIZE=$(du -h "${BACPAC_FILE}" | cut -f1)
log "Database export completed successfully (${BACPAC_SIZE})"
# Create tar.gz archive from the export directory
ARCHIVE_FILE="${WORK_DIR}/${BACKUP_FILENAME}.tar.gz"
log "Creating archive: ${ARCHIVE_FILE}"
tar -czf "${ARCHIVE_FILE}" -C "${WORK_DIR}" "${BACKUP_FILENAME}"
ARCHIVE_SIZE=$(du -h "${ARCHIVE_FILE}" | cut -f1)
log "Data export completed successfully (${ARCHIVE_SIZE})"
# Remove export directory
if [[ "${CLEANUP_LOCAL}" == "true" ]]; then
rm -rf "${EXPORT_DIR}"
log "Removed export directory"
fi
# Step 2: Encrypt the backup
log "Step 2: Encrypting backup file with GPG"
ENCRYPTED_FILE="${BACPAC_FILE}.gpg"
ENCRYPTED_FILE="${ARCHIVE_FILE}.gpg"
if ! echo "${ENCRYPTION_PASSPHRASE}" | gpg \
--batch \
@@ -112,7 +120,7 @@ if ! echo "${ENCRYPTION_PASSPHRASE}" | gpg \
--cipher-algo AES256 \
--compress-algo none \
--output "${ENCRYPTED_FILE}" \
"${BACPAC_FILE}"; then
"${ARCHIVE_FILE}"; then
log_error "Encryption failed"
exit 1
fi
@@ -120,16 +128,16 @@ fi
ENCRYPTED_SIZE=$(du -h "${ENCRYPTED_FILE}" | cut -f1)
log "Encryption completed successfully (${ENCRYPTED_SIZE})"
# Remove unencrypted BACPAC
# Remove unencrypted archive
if [[ "${CLEANUP_LOCAL}" == "true" ]]; then
rm -f "${BACPAC_FILE}"
log "Removed unencrypted BACPAC file"
rm -f "${ARCHIVE_FILE}"
log "Removed unencrypted archive"
fi
# Step 3: Upload to S3 with object lock
log "Step 3: Uploading encrypted backup to S3"
S3_KEY="backups/${BACKUP_FILENAME}.bacpac.gpg"
S3_KEY="backups/${BACKUP_FILENAME}.tar.gz.gpg"
S3_URI="s3://${S3_BUCKET}/${S3_KEY}"
# Calculate retention date
@@ -154,7 +162,7 @@ if [[ "${S3_TOOL}" == "awscli" ]]; then
--endpoint-url "${S3_ENDPOINT}" \
--object-lock-mode COMPLIANCE \
--object-lock-retain-until-date "${RETENTION_DATE}Z" \
--metadata "backup-timestamp=${TIMESTAMP},environment=${BC_ENVIRONMENT_NAME},encrypted=true"; then
--metadata "backup-timestamp=${TIMESTAMP},environment=${BC_ENVIRONMENT_NAME},encrypted=true,type=api-extract"; then
upload_success=true
fi

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env pwsh
#
# Business Central Database Export via Admin Center API
# Authenticates to Azure AD and exports BC database as BACPAC
# Business Central Data Export via BC API v2.0
# Authenticates to Azure AD and extracts critical business data as JSON
#
param(
@@ -14,19 +14,37 @@ $tenantId = $env:AZURE_TENANT_ID
$clientId = $env:AZURE_CLIENT_ID
$clientSecret = $env:AZURE_CLIENT_SECRET
$environmentName = $env:BC_ENVIRONMENT_NAME
$apiVersion = $env:BC_API_VERSION
$storageAccountSasUri = $env:AZURE_STORAGE_SAS_URI
$storageContainer = $env:AZURE_STORAGE_CONTAINER
$bcCompanyName = $env:BC_COMPANY_NAME # optional: filter to specific company
if (-not $apiVersion) {
$apiVersion = "v2.21"
}
$baseUrl = "https://api.businesscentral.dynamics.com/v2.0/$tenantId/$environmentName/api/v2.0"
if (-not $storageContainer) {
$storageContainer = "bc-exports"
}
$baseUrl = "https://api.businesscentral.dynamics.com/admin/$apiVersion"
# Entities to extract - critical business data
$entities = @(
"accounts",
"customers",
"vendors",
"items",
"salesInvoices",
"salesInvoiceLines",
"salesOrders",
"salesOrderLines",
"salesCreditMemos",
"salesCreditMemoLines",
"purchaseInvoices",
"purchaseInvoiceLines",
"purchaseOrders",
"purchaseOrderLines",
"generalLedgerEntries",
"bankAccounts",
"employees",
"dimensions",
"dimensionValues",
"currencies",
"paymentTerms",
"paymentMethods",
"journals",
"countriesRegions"
)
function Write-Log {
param([string]$Message, [string]$Level = "INFO")
@@ -63,283 +81,178 @@ function Get-AzureADToken {
}
}
function Get-ExportMetrics {
function Get-BCData {
param(
[string]$Token,
[string]$EnvironmentName
[string]$Url
)
$headers = @{
"Authorization" = "Bearer $Token"
"Accept" = "application/json"
}
$metricsUrl = "$baseUrl/exports/applications/BusinessCentral/environments/$EnvironmentName/metrics"
$allRecords = @()
try {
$response = Invoke-RestMethod -Uri $metricsUrl -Method Get -Headers $headers
Write-Log "Export metrics - Used this month: $($response.exportsPerMonth), Remaining: $($response.exportsRemainingThisMonth)"
return $response
}
catch {
Write-Log "Failed to get export metrics: $_" "WARN"
return $null
$currentUrl = $Url
while ($currentUrl) {
try {
$response = Invoke-RestMethod -Uri $currentUrl -Method Get -Headers $headers
}
catch {
Write-Log "API request failed for $currentUrl : $_" "ERROR"
throw
}
if ($response.value) {
$allRecords += $response.value
}
# Handle OData pagination
$currentUrl = $response.'@odata.nextLink'
}
return $allRecords
}
function Start-DatabaseExport {
function Get-Companies {
param([string]$Token)
Write-Log "Fetching companies..."
$companiesUrl = "$baseUrl/companies"
$companies = Get-BCData -Token $Token -Url $companiesUrl
Write-Log "Found $($companies.Count) company/companies"
return $companies
}
function Export-EntityData {
param(
[string]$Token,
[string]$EnvironmentName,
[string]$StorageSasUri,
[string]$Container,
[string]$BlobName
[string]$CompanyId,
[string]$CompanyName,
[string]$EntityName,
[string]$OutputDir
)
Write-Log "Initiating database export for environment: $EnvironmentName"
$entityUrl = "$baseUrl/companies($CompanyId)/$EntityName"
$headers = @{
"Authorization" = "Bearer $Token"
"Content-Type" = "application/json"
}
$exportUrl = "$baseUrl/exports/applications/BusinessCentral/environments/$EnvironmentName"
$body = @{
storageAccountSasUri = $StorageSasUri
container = $Container
blob = $BlobName
} | ConvertTo-Json
Write-Log " Exporting $EntityName..."
try {
$response = Invoke-RestMethod -Uri $exportUrl -Method Post -Headers $headers -Body $body
Write-Log "Database export initiated successfully"
return $response
$data = Get-BCData -Token $Token -Url $entityUrl
$count = 0
if ($data) { $count = $data.Count }
$outputFile = Join-Path $OutputDir "$EntityName.json"
$data | ConvertTo-Json -Depth 10 | Out-File -FilePath $outputFile -Encoding utf8
Write-Log " $EntityName : $count records"
return $count
}
catch {
Write-Log "Failed to initiate export: $_" "ERROR"
Write-Log "Response: $($_.ErrorDetails.Message)" "ERROR"
throw
}
}
function Get-ExportHistory {
param(
[string]$Token,
[datetime]$StartTime,
[datetime]$EndTime
)
$headers = @{
"Authorization" = "Bearer $Token"
"Content-Type" = "application/json"
}
$startStr = $StartTime.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
$endStr = $EndTime.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
$historyUrl = "$baseUrl/exports/history?start=$startStr&end=$endStr"
try {
$response = Invoke-RestMethod -Uri $historyUrl -Method Post -Headers $headers
return $response
}
catch {
Write-Log "Failed to get export history: $_" "ERROR"
return $null
}
}
function Wait-ForExport {
param(
[string]$Token,
[string]$EnvironmentName,
[string]$BlobName,
[datetime]$ExportStartTime,
[int]$MaxWaitMinutes = 120
)
Write-Log "Waiting for export to complete (max $MaxWaitMinutes minutes)..."
$startTime = Get-Date
$pollInterval = 30 # seconds
while ($true) {
$elapsed = ((Get-Date) - $startTime).TotalMinutes
if ($elapsed -gt $MaxWaitMinutes) {
Write-Log "Export timeout exceeded ($MaxWaitMinutes minutes)" "ERROR"
return $null
}
$history = Get-ExportHistory -Token $Token -StartTime $ExportStartTime -EndTime (Get-Date)
if ($null -eq $history -or $null -eq $history.value -or $history.value.Count -eq 0) {
Write-Log "No export history found yet, waiting... (Elapsed: $([math]::Round($elapsed, 1)) min)"
Start-Sleep -Seconds $pollInterval
continue
}
# Find our export by environment name and blob name
$ourExport = $history.value | Where-Object {
$_.environmentName -eq $EnvironmentName -and $_.blob -eq $BlobName
} | Sort-Object -Property startedOn -Descending | Select-Object -First 1
if (-not $ourExport) {
# Fallback: just find the most recent export for this environment
$ourExport = $history.value | Where-Object {
$_.environmentName -eq $EnvironmentName
} | Sort-Object -Property startedOn -Descending | Select-Object -First 1
}
if (-not $ourExport) {
Write-Log "Export not found in history yet, waiting... (Elapsed: $([math]::Round($elapsed, 1)) min)"
Start-Sleep -Seconds $pollInterval
continue
}
$exportStatus = $ourExport.status
Write-Log "Export status: $exportStatus (Elapsed: $([math]::Round($elapsed, 1)) min)"
switch ($exportStatus.ToLower()) {
"completed" {
Write-Log "Export completed successfully"
return $ourExport
}
"complete" {
Write-Log "Export completed successfully"
return $ourExport
}
"failed" {
Write-Log "Export failed" "ERROR"
if ($ourExport.failureReason) {
Write-Log "Failure reason: $($ourExport.failureReason)" "ERROR"
}
return $null
}
"inprogress" {
Write-Log "Export in progress..."
}
"queued" {
Write-Log "Export queued..."
}
default {
Write-Log "Unknown status: $exportStatus" "WARN"
}
}
Start-Sleep -Seconds $pollInterval
}
}
function Download-FromAzureStorage {
param(
[string]$StorageSasUri,
[string]$Container,
[string]$BlobName,
[string]$OutputPath
)
# Parse the SAS URI to construct the blob download URL
# SAS URI format: https://account.blob.core.windows.net?sv=...&sig=...
$uri = [System.Uri]$StorageSasUri
$baseStorageUrl = "$($uri.Scheme)://$($uri.Host)"
$sasToken = $uri.Query
$downloadUrl = "$baseStorageUrl/$Container/$BlobName$sasToken"
Write-Log "Downloading BACPAC from Azure Storage..."
Write-Log "Container: $Container"
Write-Log "Blob: $BlobName"
Write-Log "Saving to: $OutputPath"
try {
$ProgressPreference = 'SilentlyContinue'
Invoke-WebRequest -Uri $downloadUrl -OutFile $OutputPath -UseBasicParsing
if (Test-Path $OutputPath) {
$fileSize = (Get-Item $OutputPath).Length
$fileSizeMB = [math]::Round($fileSize / 1MB, 2)
Write-Log "Download completed successfully ($fileSizeMB MB)"
return $true
}
else {
Write-Log "Download failed - file not found" "ERROR"
return $false
}
}
catch {
Write-Log "Download failed: $_" "ERROR"
return $false
Write-Log " Failed to export ${EntityName}: $_" "WARN"
# Write empty array so downstream knows it was attempted
$outputFile = Join-Path $OutputDir "$EntityName.json"
"[]" | Out-File -FilePath $outputFile -Encoding utf8
return 0
}
}
# Main execution
try {
Write-Log "========================================="
Write-Log "BC Database Export Script"
Write-Log "BC Data Export Script (API v2.0)"
Write-Log "========================================="
Write-Log "Environment: $environmentName"
Write-Log "API Version: $apiVersion"
Write-Log "Output Path: $OutputPath"
Write-Log "Entities to extract: $($entities.Count)"
# Validate required parameters
if (-not $storageAccountSasUri) {
Write-Log "AZURE_STORAGE_SAS_URI is required for database export" "ERROR"
exit 1
# Create output directory
$exportDir = $OutputPath
if (-not (Test-Path $exportDir)) {
New-Item -ItemType Directory -Path $exportDir -Force | Out-Null
}
# Step 1: Get Azure AD token
$token = Get-AzureADToken -TenantId $tenantId -ClientId $clientId -ClientSecret $clientSecret
# Step 2: Check export metrics
Write-Log "Checking export metrics..."
$metrics = Get-ExportMetrics -Token $token -EnvironmentName $environmentName
# Step 2: Get companies
$companies = Get-Companies -Token $token
if ($metrics -and $metrics.exportsRemainingThisMonth -le 0) {
Write-Log "No exports remaining this month! (Limit reached)" "ERROR"
if ($companies.Count -eq 0) {
Write-Log "No companies found in environment $environmentName" "ERROR"
exit 1
}
# Step 3: Start the export
$blobName = "bc_export_${environmentName}_$(Get-Date -Format 'yyyyMMdd_HHmmss').bacpac"
$exportStartTime = (Get-Date).AddMinutes(-1) # slight buffer for clock differences
# Save companies list
$companies | ConvertTo-Json -Depth 10 | Out-File -FilePath (Join-Path $exportDir "companies.json") -Encoding utf8
Write-Log "Starting export to Azure Storage (container: $storageContainer, blob: $blobName)..."
$exportResult = Start-DatabaseExport `
-Token $token `
-EnvironmentName $environmentName `
-StorageSasUri $storageAccountSasUri `
-Container $storageContainer `
-BlobName $blobName
# Step 4: Wait for export to complete
$completedExport = Wait-ForExport `
-Token $token `
-EnvironmentName $environmentName `
-BlobName $blobName `
-ExportStartTime $exportStartTime `
-MaxWaitMinutes 120
if (-not $completedExport) {
Write-Log "Export did not complete successfully" "ERROR"
exit 1
# Filter to specific company if configured
$targetCompanies = $companies
if ($bcCompanyName) {
$targetCompanies = $companies | Where-Object { $_.name -eq $bcCompanyName -or $_.displayName -eq $bcCompanyName }
if ($targetCompanies.Count -eq 0) {
Write-Log "Company '$bcCompanyName' not found. Available: $($companies.name -join ', ')" "ERROR"
exit 1
}
Write-Log "Filtering to company: $bcCompanyName"
}
# Step 5: Download the BACPAC from Azure Storage
$downloadSuccess = Download-FromAzureStorage `
-StorageSasUri $storageAccountSasUri `
-Container $storageContainer `
-BlobName $blobName `
-OutputPath $OutputPath
$totalRecords = 0
$totalEntities = 0
$failedEntities = @()
if (-not $downloadSuccess) {
Write-Log "Failed to download export" "ERROR"
exit 1
# Step 3: Export data for each company
foreach ($company in $targetCompanies) {
$companyName = $company.name
$companyId = $company.id
Write-Log "-----------------------------------------"
Write-Log "Exporting company: $companyName ($companyId)"
# Create company directory (sanitize name for filesystem)
$safeName = $companyName -replace '[\\/:*?"<>|]', '_'
$companyDir = Join-Path $exportDir $safeName
if (-not (Test-Path $companyDir)) {
New-Item -ItemType Directory -Path $companyDir -Force | Out-Null
}
foreach ($entity in $entities) {
$count = Export-EntityData `
-Token $token `
-CompanyId $companyId `
-CompanyName $companyName `
-EntityName $entity `
-OutputDir $companyDir
$totalRecords += $count
$totalEntities++
if ($count -eq 0) {
$failedEntities += "$companyName/$entity"
}
}
}
# Save export metadata
$metadata = @{
exportDate = (Get-Date -Format "yyyy-MM-dd HH:mm:ss UTC" -AsUTC)
environment = $environmentName
companies = @($targetCompanies | ForEach-Object { $_.name })
entitiesExported = $totalEntities
totalRecords = $totalRecords
failedEntities = $failedEntities
}
$metadata | ConvertTo-Json -Depth 5 | Out-File -FilePath (Join-Path $exportDir "export-metadata.json") -Encoding utf8
Write-Log "========================================="
Write-Log "Export completed successfully"
Write-Log "Export completed"
Write-Log "Companies: $($targetCompanies.Count)"
Write-Log "Entities: $totalEntities"
Write-Log "Total records: $totalRecords"
if ($failedEntities.Count -gt 0) {
Write-Log "Failed/empty: $($failedEntities.Count) entities" "WARN"
}
Write-Log "========================================="
exit 0
}

View File

@@ -1,7 +1,7 @@
#!/bin/bash
#
# Business Central Backup Decryption Utility
# Decrypts a GPG-encrypted BACPAC backup file
# Decrypts a GPG-encrypted backup archive and extracts JSON data
#
set -euo pipefail
@@ -37,22 +37,22 @@ show_usage() {
cat << EOF
Business Central Backup Decryption Utility
Usage: $0 <encrypted-file> [output-file]
Usage: $0 <encrypted-file> [output-directory]
Arguments:
<encrypted-file> Path to the encrypted .gpg backup file
[output-file] Optional: Path for decrypted output (default: removes .gpg extension)
<encrypted-file> Path to the encrypted .tar.gz.gpg backup file
[output-directory] Optional: Path to extract data to (default: current directory)
Examples:
# Decrypt to default name (backup.bacpac)
$0 backup.bacpac.gpg
# Decrypt and extract to current directory
$0 bc_backup_Production_20260107_100000.tar.gz.gpg
# Decrypt to specific name
$0 backup.bacpac.gpg restored_database.bacpac
# Decrypt and extract to specific directory
$0 backup.tar.gz.gpg ./restored-data/
# Download from S3 and decrypt
aws s3 cp s3://bucket/backups/bc_backup_Production_20260107_100000.bacpac.gpg ./backup.gpg
$0 backup.gpg
aws s3 cp s3://bucket/backups/bc_backup_Production_20260107_100000.tar.gz.gpg ./backup.gpg
$0 backup.gpg ./restored/
Note: You will be prompted for the encryption passphrase.
This is the ENCRYPTION_PASSPHRASE from bc-backup.conf
@@ -66,7 +66,7 @@ if [[ $# -lt 1 ]]; then
fi
ENCRYPTED_FILE="$1"
OUTPUT_FILE="${2:-}"
OUTPUT_DIR="${2:-.}"
# Validate encrypted file exists
if [[ ! -f "$ENCRYPTED_FILE" ]]; then
@@ -74,72 +74,33 @@ if [[ ! -f "$ENCRYPTED_FILE" ]]; then
exit 1
fi
# Determine output filename
if [[ -z "$OUTPUT_FILE" ]]; then
# Remove .gpg extension
OUTPUT_FILE="${ENCRYPTED_FILE%.gpg}"
# If still the same (no .gpg extension), append .decrypted
if [[ "$OUTPUT_FILE" == "$ENCRYPTED_FILE" ]]; then
OUTPUT_FILE="${ENCRYPTED_FILE}.decrypted"
fi
fi
# Check if output file already exists
if [[ -f "$OUTPUT_FILE" ]]; then
echo_warn "Output file already exists: $OUTPUT_FILE"
read -p "Overwrite? (y/n) " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
echo_info "Aborted."
exit 0
fi
fi
# Create output directory
mkdir -p "$OUTPUT_DIR"
echo_info "========================================="
echo_info "BC Backup Decryption"
echo_info "========================================="
echo_info "Encrypted file: $ENCRYPTED_FILE"
echo_info "Output file: $OUTPUT_FILE"
echo_info "Output directory: $OUTPUT_DIR"
echo_info "File size: $(du -h "$ENCRYPTED_FILE" | cut -f1)"
echo ""
echo_warn "You will be prompted for the encryption passphrase"
echo_warn "This is the ENCRYPTION_PASSPHRASE from bc-backup.conf"
echo ""
# Decrypt the file
if gpg \
# Determine intermediate tar.gz filename
TARBALL="${ENCRYPTED_FILE%.gpg}"
if [[ "$TARBALL" == "$ENCRYPTED_FILE" ]]; then
TARBALL="${ENCRYPTED_FILE}.tar.gz"
fi
# Step 1: Decrypt
echo_info "Decrypting..."
if ! gpg \
--decrypt \
--output "$OUTPUT_FILE" \
--output "$TARBALL" \
"$ENCRYPTED_FILE"; then
echo ""
echo_info "========================================="
echo_info "Decryption completed successfully!"
echo_info "========================================="
echo_info "Decrypted file: $OUTPUT_FILE"
echo_info "File size: $(du -h "$OUTPUT_FILE" | cut -f1)"
echo ""
echo_info "Next steps for restoration:"
echo ""
echo "1. Install SqlPackage (if not already installed):"
echo " Download from: https://learn.microsoft.com/en-us/sql/tools/sqlpackage/sqlpackage-download"
echo ""
echo "2. Create or identify target Azure SQL Database"
echo ""
echo "3. Import the BACPAC:"
echo " sqlpackage /a:Import \\"
echo " /sf:$OUTPUT_FILE \\"
echo " /tsn:your-server.database.windows.net \\"
echo " /tdn:RestoredBCDatabase \\"
echo " /tu:admin \\"
echo " /tp:YourPassword"
echo ""
echo "4. Contact Microsoft Support to connect BC to the restored database"
echo ""
exit 0
else
echo ""
echo_error "Decryption failed!"
echo_error "Possible causes:"
@@ -148,3 +109,49 @@ else
echo " - File is not GPG-encrypted"
exit 1
fi
# Step 2: Extract
echo_info "Extracting archive..."
tar -xzf "$TARBALL" -C "$OUTPUT_DIR"
# Remove intermediate tar.gz
rm -f "$TARBALL"
# Find the extracted directory
EXTRACTED_DIR=$(find "$OUTPUT_DIR" -maxdepth 1 -name "bc_backup_*" -type d | head -1)
echo ""
echo_info "========================================="
echo_info "Decryption and extraction completed!"
echo_info "========================================="
echo_info "Data extracted to: ${EXTRACTED_DIR:-$OUTPUT_DIR}"
echo ""
# Show metadata if present
METADATA_FILE="${EXTRACTED_DIR:-$OUTPUT_DIR}/export-metadata.json"
if [[ -f "$METADATA_FILE" ]]; then
echo_info "Export metadata:"
cat "$METADATA_FILE"
echo ""
fi
# List what was extracted
echo_info "Extracted contents:"
if [[ -n "$EXTRACTED_DIR" ]]; then
ls -la "$EXTRACTED_DIR"/
echo ""
# List company directories
for dir in "$EXTRACTED_DIR"/*/; do
if [[ -d "$dir" ]]; then
company=$(basename "$dir")
file_count=$(find "$dir" -name "*.json" | wc -l)
echo_info " Company '$company': $file_count entity files"
fi
done
fi
echo ""
echo_info "The extracted JSON files contain your BC business data."
echo_info "Each entity (customers, vendors, GL entries, etc.) is a separate JSON file."
exit 0