Files
BC-bak/decrypt-backup.sh
Malin 77f48f326b feat: switch from Admin Center database export to BC API v2.0 data extraction
The Admin Center export API requires an Azure Storage SAS URI, which in
turn requires an Azure subscription — defeating the purpose of an
independent backup. Instead, use the BC API v2.0 to extract critical
business data (customers, vendors, items, GL entries, invoices, etc.) as JSON files.

- bc-export.ps1: rewritten to use BC API v2.0 endpoints, extracts 23
  entity types per company with OData pagination support
- bc-backup.sh: handles JSON export directory, creates tar.gz archive
  before encrypting and uploading to S3
- bc-backup.conf.template: removed Azure Storage SAS config, added
  optional BC_COMPANY_NAME filter
- decrypt-backup.sh: updated for tar.gz.gpg format, shows extracted
  entity files and metadata after decryption

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-10 07:33:32 +01:00

158 lines
3.9 KiB
Bash
Executable File

#!/bin/bash
#
# Business Central Backup Decryption Utility
# Decrypts a GPG-encrypted backup archive and extracts JSON data
#
# Strict mode: abort on errors, unset variables, and pipeline failures.
set -euo pipefail

# ANSI color codes used by the echo_* logging helpers below.
# Declared readonly so they cannot be accidentally clobbered later.
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly RED='\033[0;31m'
readonly NC='\033[0m'  # reset / no color
# Print an informational message with a green [INFO] tag.
# %b interprets backslash escapes in the argument, exactly like `echo -e`.
echo_info() {
  printf '%b\n' "${GREEN}[INFO]${NC} $*"
}
# Print a warning message with a yellow [WARN] tag.
# Sent to stderr so diagnostics never pollute stdout, which callers
# may capture or redirect.
echo_warn() {
  echo -e "${YELLOW}[WARN]${NC} $*" >&2
}
# Print an error message with a red [ERROR] tag.
# Sent to stderr so errors never pollute stdout, which callers
# may capture or redirect.
echo_error() {
  echo -e "${RED}[ERROR]${NC} $*" >&2
}
# Pre-flight: bail out immediately if GPG is not on PATH.
command -v gpg >/dev/null 2>&1 || {
  echo_error "GPG is not installed. Install it first:"
  echo " Ubuntu/Debian: sudo apt-get install gnupg"
  echo " CentOS/RHEL: sudo yum install gnupg2"
  exit 1
}
# Print the command-line help text to stdout.
# The heredoc delimiter is unquoted on purpose: $0 must expand to the
# script name inside the usage examples.
show_usage() {
  cat <<USAGE
Business Central Backup Decryption Utility
Usage: $0 <encrypted-file> [output-directory]
Arguments:
<encrypted-file> Path to the encrypted .tar.gz.gpg backup file
[output-directory] Optional: Path to extract data to (default: current directory)
Examples:
# Decrypt and extract to current directory
$0 bc_backup_Production_20260107_100000.tar.gz.gpg
# Decrypt and extract to specific directory
$0 backup.tar.gz.gpg ./restored-data/
# Download from S3 and decrypt
aws s3 cp s3://bucket/backups/bc_backup_Production_20260107_100000.tar.gz.gpg ./backup.gpg
$0 backup.gpg ./restored/
Note: You will be prompted for the encryption passphrase.
This is the ENCRYPTION_PASSPHRASE from bc-backup.conf
USAGE
}
# ---- Argument handling and pre-flight checks ----------------------------
# At least the encrypted file is required; show help otherwise.
(( $# >= 1 )) || { show_usage; exit 1; }

ENCRYPTED_FILE="$1"
OUTPUT_DIR="${2:-.}"  # default: extract into the current directory

# Refuse to continue if the encrypted archive does not exist.
if [[ ! -f "$ENCRYPTED_FILE" ]]; then
  echo_error "Encrypted file not found: $ENCRYPTED_FILE"
  exit 1
fi

# Ensure the destination directory exists before extraction.
mkdir -p "$OUTPUT_DIR"

# Banner summarizing the parameters about to be used.
echo_info "========================================="
echo_info "BC Backup Decryption"
echo_info "========================================="
echo_info "Encrypted file: $ENCRYPTED_FILE"
echo_info "Output directory: $OUTPUT_DIR"
echo_info "File size: $(du -h "$ENCRYPTED_FILE" | cut -f1)"
echo ""
echo_warn "You will be prompted for the encryption passphrase"
echo_warn "This is the ENCRYPTION_PASSPHRASE from bc-backup.conf"
echo ""
# Determine the intermediate tar.gz filename by stripping the .gpg suffix.
# If the input has no .gpg suffix, fall back to appending .tar.gz so the
# decrypted output never overwrites the input file.
TARBALL="${ENCRYPTED_FILE%.gpg}"
if [[ "$TARBALL" == "$ENCRYPTED_FILE" ]]; then
TARBALL="${ENCRYPTED_FILE}.tar.gz"
fi
# Step 1: Decrypt (gpg prompts interactively for the symmetric passphrase)
echo_info "Decrypting..."
if ! gpg \
--decrypt \
--output "$TARBALL" \
"$ENCRYPTED_FILE"; then
echo ""
echo_error "Decryption failed!"
echo_error "Possible causes:"
echo " - Incorrect passphrase"
echo " - Corrupted encrypted file"
echo " - File is not GPG-encrypted"
exit 1
fi
# Step 2: Extract. Handle failure explicitly so a corrupted archive does
# not leave the decrypted plaintext tarball lying around (under `set -e`
# an unchecked tar failure would abort before the cleanup below runs).
echo_info "Extracting archive..."
if ! tar -xzf "$TARBALL" -C "$OUTPUT_DIR"; then
rm -f "$TARBALL"
echo_error "Extraction failed - the archive appears to be corrupted"
exit 1
fi
# Remove intermediate tar.gz
rm -f "$TARBALL"
# Locate the top-level directory produced by the archive (bc_backup_*).
# `|| true` guards against pipefail aborting the script if `head` closes
# the pipe early (SIGPIPE in find); an empty result is handled below.
EXTRACTED_DIR=$(find "$OUTPUT_DIR" -maxdepth 1 -name "bc_backup_*" -type d | head -n 1) || true
echo ""
echo_info "========================================="
echo_info "Decryption and extraction completed!"
echo_info "========================================="
echo_info "Data extracted to: ${EXTRACTED_DIR:-$OUTPUT_DIR}"
echo ""
# Show the export metadata file if the backup included one
METADATA_FILE="${EXTRACTED_DIR:-$OUTPUT_DIR}/export-metadata.json"
if [[ -f "$METADATA_FILE" ]]; then
echo_info "Export metadata:"
cat "$METADATA_FILE"
echo ""
fi
# List what was extracted
echo_info "Extracted contents:"
if [[ -n "$EXTRACTED_DIR" ]]; then
ls -la "$EXTRACTED_DIR"/
echo ""
# Per-company summary: each company directory holds one JSON file per
# exported entity type.
for dir in "$EXTRACTED_DIR"/*/; do
if [[ -d "$dir" ]]; then
company=$(basename "$dir")
# -type f: count only regular files named *.json. The arithmetic
# expansion strips the leading whitespace BSD/macOS `wc -l` emits,
# keeping the message formatting consistent across platforms.
file_count=$(find "$dir" -name "*.json" -type f | wc -l)
file_count=$((file_count))
echo_info " Company '$company': $file_count entity files"
fi
done
fi
echo ""
echo_info "The extracted JSON files contain your BC business data."
echo_info "Each entity (customers, vendors, GL entries, etc.) is a separate JSON file."
exit 0