Backup IBM API Connect Management Database using SFTP (Scheduled)

Danang Priabada
9 min readAug 1, 2024

--

I'm using IBM APIC version 10.0.8.0–258.

First Backup your management infrastructure configuration

This script automates the backup configuration from this guidance

#!/bin/bash
#
# Back up IBM API Connect (v10.0.8) management & portal configuration:
# CredentialSecrets, encryption-key secrets, the ManagementCluster and
# PortalCluster CRs, and the SFTP backup secrets (created if missing).
#
# README : There is some TODO, that you need to pay attention. Search TODO to find which variable you need to change

# NOTE: deliberately NOT using `set -e` — every critical command is checked
# explicitly with `|| die`, so we always get a meaningful error message.
# (With `set -e` the script would exit before any check could run.)
set -u

# Log an error message to stderr.
log_error() {
  printf '[ERROR] %s\n' "$1" >&2
}

# Log an error message and abort the script.
die() {
  log_error "$1"
  exit 1
}

# Create a timestamped directory for all backup artifacts.
TIMESTAMP=$(date +"%d%m%Y-%H%M%S")
readonly BACKUP_DIR="backup_secrets_$TIMESTAMP"
mkdir -p "$BACKUP_DIR" || die "Failed to create backup directory $BACKUP_DIR."

# Namespace to fetch secrets from
# TODO : Change NAMESPACE variable value
readonly NAMESPACE="cp4i"
# NOTE(review): ENVLOCATION is never used below — confirm whether it is
# needed (e.g. for the SFTP path) or remove it.
readonly ENVLOCATION="DC2"

# Fetch the ManagementCluster YAML once; it is reused for the CredentialSecret
# names and the management encryption key below.
MGMT_YAML=$(kubectl -n "$NAMESPACE" get mgmt -o yaml) \
  || die "Failed to read ManagementCluster resource."

# Back up every CredentialSecret directly — no generated intermediate script,
# so there are no shell-quoting pitfalls. Lines look like
# "  somethingCredentialSecret: <secret-name>", so the name is field 2.
while read -r _ SECRET_NAME; do
  [ -n "$SECRET_NAME" ] || continue
  kubectl -n "$NAMESPACE" get secret "$SECRET_NAME" -o yaml \
    > "$BACKUP_DIR/${SECRET_NAME}_backup.yaml" \
    || die "Failed to backup secret $SECRET_NAME."
done < <(printf '%s\n' "$MGMT_YAML" | grep CredentialSecret)

# Remove ownerReferences and selfLink from all backup YAML files so the
# secrets can be re-applied on a fresh cluster.
for FILE in "$BACKUP_DIR"/*_backup.yaml; do
  [ -e "$FILE" ] || continue  # glob did not match: no CredentialSecrets found
  sed -i '/ownerReferences:/,/^ *uid:/d' "$FILE" \
    || die "Failed to remove ownerReferences from $FILE."
  sed -i '/selfLink:/d' "$FILE" \
    || die "Failed to remove selfLink from $FILE."
done

echo "Secrets fetched and edited successfully."

# Backup the ManagementCluster resource
kubectl -n "$NAMESPACE" get ManagementCluster -o yaml \
  > "$BACKUP_DIR/mgmt_cr_backup.yaml" \
  || die "Failed to backup ManagementCluster resource."

# Backup the PortalCluster resource
kubectl -n "$NAMESPACE" get PortalCluster -o yaml \
  > "$BACKUP_DIR/ptl_cr_backup.yaml" \
  || die "Failed to backup PortalCluster resource."

# Get the management encryption key and backup its secret.
# `exit` after the first match guards against multi-line results.
ENC_KEY_MGMT=$(printf '%s\n' "$MGMT_YAML" | awk '/encryption/ {print $2; exit}')
[ -n "$ENC_KEY_MGMT" ] || die "Failed to retrieve management encryption key."
kubectl -n "$NAMESPACE" get secret "$ENC_KEY_MGMT" -o yaml \
  > "$BACKUP_DIR/management_enc_key_backup.yaml" \
  || die "Failed to backup management encryption key secret."

# Get the portal encryption key and backup its secret
ENC_KEY_PTL=$(kubectl -n "$NAMESPACE" get ptl -o yaml \
  | awk '/encryption/ {print $2; exit}')
[ -n "$ENC_KEY_PTL" ] || die "Failed to retrieve portal encryption key."
kubectl -n "$NAMESPACE" get secret "$ENC_KEY_PTL" -o yaml \
  > "$BACKUP_DIR/portal_enc_key_backup.yaml" \
  || die "Failed to backup portal encryption key secret."

#######################################
# Back up an SFTP backup secret, creating it first if it does not exist.
# The mgmt and ptl branches were previously duplicated verbatim.
# Globals:   NAMESPACE, BACKUP_DIR (read)
# Arguments: $1 - secret name (mgmt-backup-secret or ptl-backup-secret)
#######################################
backup_or_create_secret() {
  local secret_name=$1

  if kubectl -n "$NAMESPACE" get secret "$secret_name" >/dev/null 2>&1; then
    # Backup the existing secret
    kubectl -n "$NAMESPACE" get secret "$secret_name" -o yaml \
      > "$BACKUP_DIR/${secret_name}_backup.yaml" \
      || die "Failed to backup existing $secret_name."
  else
    # Create a new secret
    # TODO : Change USERNAME and PASSWORD variable value
    # SECURITY: avoid hardcoding real credentials in this script — prefer
    # reading them from the environment or a protected file.
    local username="sftpuser"
    local password="P4ss#word123!"

    kubectl -n "$NAMESPACE" create secret generic "$secret_name" \
      --from-literal=username="$username" \
      --from-literal=password="$password" \
      || die "Failed to create new $secret_name."

    # Backup the newly created secret
    kubectl -n "$NAMESPACE" get secret "$secret_name" -o yaml \
      > "$BACKUP_DIR/${secret_name}_backup.yaml" \
      || die "Failed to backup newly created $secret_name."
  fi
}

backup_or_create_secret mgmt-backup-secret
echo "Backup and secret management completed successfully."

backup_or_create_secret ptl-backup-secret
echo "Backup and secret portal completed successfully."

This script generates these files

[root@BASTION backup_secrets_01082024-174252]# ls -all
total 76
drwxr-xr-x. 2 root root 4096 Jul 31 17:42 .
drwxr-xr-x. 3 root root 123 Jul 31 17:42 ..
-rw-r--r--. 1 root root 611 Jul 31 17:42 apiconnect-5019c580-atm-cred_backup.yaml
-rw-r--r--. 1 root root 613 Jul 31 17:42 apiconnect-5019c580-ccli-cred_backup.yaml
-rw-r--r--. 1 root root 611 Jul 31 17:42 apiconnect-5019c580-cli-cred_backup.yaml
-rw-r--r--. 1 root root 611 Jul 31 17:42 apiconnect-5019c580-cui-cred_backup.yaml
-rw-r--r--. 1 root root 623 Jul 31 17:42 apiconnect-5019c580-discovery-cred_backup.yaml
-rw-r--r--. 1 root root 613 Jul 31 17:42 apiconnect-5019c580-dsgr-cred_backup.yaml
-rw-r--r--. 1 root root 625 Jul 31 17:42 apiconnect-5019c580-governance-cred_backup.yaml
-rw-r--r--. 1 root root 613 Jul 31 17:42 apiconnect-5019c580-juhu-cred_backup.yaml
-rw-r--r--. 1 root root 609 Jul 31 17:42 apiconnect-5019c580-ui-cred_backup.yaml
-rw-r--r--. 1 root root 655 Jul 31 17:42 management_enc_key_backup.yaml
-rw-r--r--. 1 root root 271 Jul 31 17:42 mgmt-backup-secret_backup.yaml
-rw-r--r--. 1 root root 9539 Jul 31 17:42 mgmt_cr_backup.yaml
-rw-r--r--. 1 root root 614 Jul 31 17:42 portal_enc_key_backup.yaml
-rw-r--r--. 1 root root 270 Jul 31 17:42 ptl-backup-secret_backup.yaml
-rw-r--r--. 1 root root 4883 Jul 31 17:42 ptl_cr_backup.yaml

Next you need to setup your SFTP server

In this case I'm using RHEL as Operating System

ssh root@192.168.192.168
sudo yum update -y 

sudo yum install openssh-server -y

sudo systemctl start sshd
sudo systemctl enable sshd

Group : sftpusers
User : sftpuser

sudo adduser sftpuser
sudo passwd sftpuser

Before you edit the original sshd_config, you need to back it up.

sudo cp /etc/ssh/sshd_config /etc/ssh/sshd_config.bak

Then edit it, adding this block at the end of the file

sudo vi /etc/ssh/sshd_config

# User sftp for APIC Backup
Match User sftpuser
ForceCommand internal-sftp
PasswordAuthentication yes
ChrootDirectory /home/sftpuser
PermitTunnel no
AllowAgentForwarding no
AllowTcpForwarding no
X11Forwarding no

Make sure the chroot directory has the proper user access

The ChrootDirectory setting in SFTP is used to lock a user into a specific directory on the server. This means when the user logs in, they can only see and access that directory and its subdirectories. They can't navigate to any other part of the server's file system.

Chroot Directory should be set as root access

sudo chown root:root /home/sftpuser 
sudo chmod 755 /home/sftpuser

Create folder that will be needed by the APIC

sudo mkdir -p /home/sftpuser/apicbackup/DC2/mgmtb
sudo chown sftpuser:sftpuser /home/sftpuser/apicbackup/DC2/mgmtb
sudo chmod 700 /home/sftpuser/apicbackup/DC2/mgmtb
sudo systemctl restart sshd 

Test your SFTP Server

C:\Users\danang.priabada>sftp sftpuser@192.168.192.168
sftpuser@192.168.192.168's password:
Connected to 192.168.192.168.
sftp>
sftp> put FileFromClient.txt
Uploading FileFromClient.txt to /apicbackup/DC2/mgmtb/FileFromClient.txt
FileFromClient.txt 100% 0 0.0KB/s 00:00
sftp> ls
FileFromClient.txt FileFromServer.txt
sftp> get FileFromServer.txt
Fetching /FileFromServer.txt to FileFromServer.txt
sftp> lls
sftp> FileFromServer.txt

The SFTP setup is done, now back to bastion server.

Create Backup Secret

The backup secret is a Kubernetes secret that contains your access key and secret for your SFTP server or object-store. This secret will be loaded into the API Connect Cluster CR.

oc -n <management namespace> create secret generic mgmt-backup-secret --from-literal=username='<username>' --from-literal=password='<password>'
oc -n cp4i create secret generic mgmt-backup-secret --from-literal=username='myadmin' --from-literal=password='P4ss#word123!'

Edit Custom Resource of IBM API Connect Cluster

Log in to your bastion server, then execute this command. Change the namespace to your own; in this case we are using cp4i.

oc -n cp4i edit APIConnectCluster

Add these lines to the CR, and don't forget to reference your Backup Secret here.

spec:
management:
databaseBackup:
credentials: mgmt-backup-secret
host: 192.168.192.168
path: /apicbackup/mgmtb
port: 22
protocol: sftp
repoRetentionFull: 30
schedule: 0 0 16 * * *

Note: The Schedule parameter follows the UTC time of the OCP server. You can ssh to one ocp node, then see the date.

Schedule:

The schedule for the backup operation, specified in cron format.

  • Value: 0 0 16 * * *
  • 0: Seconds field, the backup will start at exactly zero seconds.
  • 0: Minute field, indicating the backup starts at exactly zero minute of the hour.
  • 16: Hour field, indicating the backup starts at 16:00 (4:00 PM).
  • *: Day of the month field, indicating the backup runs every day.
  • *: Month field, indicating the backup runs every month.
  • *: Day of the week field, indicating the backup runs every day of the week.

The configuration sets up a daily backup of the management database at 16:00 UTC, storing the backups on a server with IP 192.168.192.168 via SFTP. The credentials for accessing the server are stored in a Kubernetes secret named mgmt-backup-secret. The backups are stored in the directory /apicbackup/mgmtb, and up to 30 full backups are retained.

Check S3 Proxy Username and Password

We need to make sure the proxy username and password are the same as the data in your secret “mgmt-backup-secret”.

oc exec -it <s3proxy-pod> -- bash -c 'cat /etc/s3proxy/upstream/username'
oc exec -it <s3proxy-pod> -- bash -c 'cat /etc/s3proxy/upstream/password'
[root@BASTION ~]# oc exec -it apiconnect-5019c580-s3proxy-0 -- bash -c 'cat /etc/s3proxy/upstream/username'
myadmin
[root@BASTION ~]# oc exec -it apiconnect-5019c580-s3proxy-0 -- bash -c 'cat /etc/s3proxy/upstream/password'
P4ss#word123!

Apply your configuration

When you apply the CR changes, apiconnect instance will be reconfigured. And the related pod will be restarted, the status of instance will be pending until all of configuration setup is done.

Just wait until the configuration is done

Check your SFTP server

Now we can check the SFTP server to see whether any backup data was successfully uploaded to it.

There is one backup in the .../base folder called 20240731T160000, so APIC successfully backed up to the SFTP server.

[root@SFTP DC]# ls -lRt
.:
total 0
drwx------. 2 sftpuser sftpuser 168 Jul 31 15:37 ptlb
drwx------. 3 sftpuser sftpuser 64 Jul 31 15:37 mgmtb


./ptlb:
total 0


./mgmtb:
total 0
drwxr-xr-x. 4 sftpuser sftpuser 30 Jul 31 08:00 apiconnect-5019c580-5019c580-db-2024-07-15T043558Z


./mgmtb/apiconnect-5019c580-5019c580-db-2024-07-15T043558Z:
total 0
drwxr-xr-x. 3 sftpuser sftpuser 29 Jul 31 08:00 base
drwxr-xr-x. 3 sftpuser sftpuser 30 Jul 30 15:59 wals


./mgmtb/apiconnect-5019c580-5019c580-db-2024-07-15T043558Z/base:
total 0
drwxr-xr-x. 2 sftpuser sftpuser 70 Jul 31 08:00 20240731T160000


./mgmtb/apiconnect-5019c580-5019c580-db-2024-07-15T043558Z/base/20240731T010000:
total 4
-rw-r--r--. 1 sftpuser sftpuser 1316 Jul 31 08:00 backup.info
-rw-r--r--. 1 sftpuser sftpuser 0 Jul 31 08:00 data.tar.gz


./mgmtb/apiconnect-5019c580-5019c580-db-2024-07-15T043558Z/wals:
total 4
drwxr-xr-x. 2 sftpuser sftpuser 4096 Jul 31 15:00 0000000100000001


./mgmtb/apiconnect-5019c580-5019c580-db-2024-07-15T043558Z/wals/0000000100000001:
total 25348
-rw-r--r--. 1 sftpuser sftpuser 1103711 Jul 31 15:00 00000001000000010000009F.gz
-rw-r--r--. 1 sftpuser sftpuser 1042324 Jul 31 14:00 00000001000000010000009E.gz
-rw-r--r--. 1 sftpuser sftpuser 1062832 Jul 31 13:00 00000001000000010000009D.gz
-rw-r--r--. 1 sftpuser sftpuser 1122383 Jul 31 12:00 00000001000000010000009C.gz
-rw-r--r--. 1 sftpuser sftpuser 1098568 Jul 31 11:00 00000001000000010000009B.gz
-rw-r--r--. 1 sftpuser sftpuser 1047531 Jul 31 10:00 00000001000000010000009A.gz
-rw-r--r--. 1 sftpuser sftpuser 1078648 Jul 31 09:00 000000010000000100000099.gz
-rw-r--r--. 1 sftpuser sftpuser 211 Jul 31 08:00 000000010000000100000098.00000028.backup.gz
-rw-r--r--. 1 sftpuser sftpuser 21095 Jul 31 08:00 000000010000000100000098.gz
-rw-r--r--. 1 sftpuser sftpuser 60237 Jul 31 08:00 000000010000000100000097.gz
-rw-r--r--. 1 sftpuser sftpuser 1041044 Jul 31 07:59 000000010000000100000096.gz
-rw-r--r--. 1 sftpuser sftpuser 1069726 Jul 31 06:59 000000010000000100000095.gz
-rw-r--r--. 1 sftpuser sftpuser 1093605 Jul 31 05:59 000000010000000100000094.gz
-rw-r--r--. 1 sftpuser sftpuser 1094995 Jul 31 04:59 000000010000000100000093.gz
-rw-r--r--. 1 sftpuser sftpuser 1064969 Jul 31 03:59 000000010000000100000092.gz
-rw-r--r--. 1 sftpuser sftpuser 1070917 Jul 31 02:59 000000010000000100000091.gz
-rw-r--r--. 1 sftpuser sftpuser 1080252 Jul 31 01:59 000000010000000100000090.gz
-rw-r--r--. 1 sftpuser sftpuser 1058545 Jul 31 00:59 00000001000000010000008F.gz
[root@OCPSFTPMWDC2 DC2]#

--

--

Danang Priabada
Danang Priabada

Written by Danang Priabada

Red Hat and IBM Product Specialist | JPN : プリアバダ ダナン | CHN : 逹男 | linktr.ee/danangpriabada

No responses yet