cd /home/myusername/
mkdir scripts backups
Don't forget to make each script executable with chmod, for example:
sudo chmod +x portainer-backup.sh
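If you end up with several backup scripts in that folder, you can also mark them all executable in one go (this assumes they all use the .sh extension):
sudo chmod +x /home/myusername/scripts/*.sh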
cd /home/myusername/scripts
nano docker_network_toolbox.sh
#!/bin/bash
echo -e "\033]0;Docker Network Toolbox\007"
# ANSI Escape Code for Colors
reset="\033[0m"
white_fg_strong="\033[90m"
red_fg_strong="\033[91m"
green_fg_strong="\033[92m"
yellow_fg_strong="\033[93m"
blue_fg_strong="\033[94m"
magenta_fg_strong="\033[95m"
cyan_fg_strong="\033[96m"
# Normal Background Colors
red_bg="\033[41m"
blue_bg="\033[44m"
yellow_bg="\033[43m"
backup_folder="docker_network_backups"
# Function to log messages with timestamps and colors
log_message() {
# Timestamp with time only
current_time=$(date +'%H:%M:%S')
# Timestamp with date and time (uncomment to use this format instead)
# current_time=$(date +'%Y-%m-%d %H:%M:%S')
case "$1" in
"INFO")
echo -e "${blue_bg}[$current_time]${reset} ${blue_fg_strong}[INFO]${reset} $2"
;;
"WARN")
echo -e "${yellow_bg}[$current_time]${reset} ${yellow_fg_strong}[WARN]${reset} $2"
;;
"ERROR")
echo -e "${red_bg}[$current_time]${reset} ${red_fg_strong}[ERROR]${reset} $2"
;;
*)
echo -e "${blue_bg}[$current_time]${reset} ${blue_fg_strong}[DEBUG]${reset} $2"
;;
esac
}
# Check if jq is installed, and if not, install it
if ! command -v jq &> /dev/null; then
log_message "INFO" "Installing jq..."
# Installation commands based on the system's package manager
if command -v apt &> /dev/null; then
sudo apt install -y jq
elif command -v yum &> /dev/null; then
sudo yum install -y jq
elif command -v brew &> /dev/null; then
brew install jq
else
log_message "ERROR" "jq is not installed, and no known package manager found. Please install jq manually."
exit 1
fi
log_message "INFO" "jq installed successfully."
fi
# Function to backup Docker networks
backup_networks() {
log_message "INFO" "Backing up Docker networks..."
mkdir -p "$backup_folder"
# Iterate through Docker networks
docker network ls --format "{{.Name}}" | while read -r network_name; do
# Export network configuration to a backup file in the backup folder
docker network inspect "$network_name" > "$backup_folder/network_${network_name}_backup.json"
done
log_message "INFO" "Network backup completed. Files are stored in: $PWD/$backup_folder"
read -p "Press Enter to continue..."
home
}
# Function to restore Docker networks
restore_networks() {
log_message "INFO" "Restoring Docker networks..."
# Check if the backup folder exists
if [ -d "$backup_folder" ]; then
# Iterate through network backup files in the backup folder
for backup_file in "$backup_folder"/network_*_backup.json; do
# Extract network name from the backup file name
network_name=$(basename "$backup_file" | sed 's/network_\(.*\)_backup.json/\1/')
# Check if the network already exists
if [ -z "$(docker network ls --filter name="$network_name" -q)" ]; then
# Read network configuration from JSON file
driver=$(jq -r '.[0].Driver' "$backup_file")
case "$driver" in
"bridge")
subnet=$(jq -r '.[0].IPAM.Config[0].Subnet' "$backup_file")
gateway=$(jq -r '.[0].IPAM.Config[0].Gateway' "$backup_file")
iprange=$(jq -r '.[0].IPAM.Config[0].IPRange' "$backup_file")
internal=$(jq -r '.[0].Internal' "$backup_file")
attachable=$(jq -r '.[0].Attachable' "$backup_file")
ingress=$(jq -r '.[0].Ingress' "$backup_file")
# Create the bridge network using the extracted configuration
docker network create \
--driver="$driver" \
--subnet="$subnet" \
--ip-range="$iprange" \
--gateway="$gateway" \
--internal="$internal" \
--attachable="$attachable" \
--ingress="$ingress" \
"$network_name"
;;
"macvlan")
parent=$(jq -r '.[0].Options.parent' "$backup_file")
# Create the macvlan network using the extracted configuration
docker network create \
--driver="$driver" \
--subnet="$subnet" \
--ip-range="$iprange" \
--gateway="$gateway" \
--internal="$internal" \
--attachable="$attachable" \
--config-from="$config_from" \
--config-only="$config_only" \
--options="parent=$parent" \
"$network_name"
;;
"null")
# Create the null network using the extracted configuration
docker network create \
--driver="$driver" \
--config-from="$config_from" \
--config-only="$config_only" \
"$network_name"
;;
*)
log_message "WARN" "Unsupported driver '$driver' for network '$network_name'. Skipping restore."
continue
;;
esac
log_message "INFO" "Network '$network_name' restored."
else
log_message "WARN" "Network '$network_name' already exists. Skipping restore."
fi
done
else
log_message "ERROR" "Backup folder '$backup_folder' not found. Please ensure that you have previously backed up your networks."
read -p "Press Enter to continue..."
home
fi
read -p "Press Enter to continue..."
home
}
# Home menu
home() {
while true; do
echo -e "\033]0;Docker Network Toolbox [HOME]\007"
clear
echo -e "${blue_fg_strong}/ Home${reset}"
echo "-------------------------------------"
echo "What would you like to do?"
echo "1. Backup Docker Networks"
echo "2. Restore Docker Networks"
echo "3. Exit"
read -p "Choose Your Destiny: " home_choice
# Default to choice 1 if no input is provided
if [ -z "$home_choice" ]; then
home_choice=1
fi
case $home_choice in
1) backup_networks; break ;;
2) restore_networks; break ;;
3) exit ;;
*) echo -e "${yellow_fg_strong}WARNING: Invalid number. Please insert a valid number.${reset}"
read -p "Press Enter to continue..."
home ;;
esac
done
}
# Start the home menu
home
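Each backup file is simply the JSON that docker network inspect prints. If you want to sanity-check a backup before restoring it, a quick jq query (the file name below is just an example) shows the fields the restore function reads:
jq '.[0] | {Driver, Internal, Attachable, Ingress, IPAM: .IPAM.Config}' docker_network_backups/network_mynetwork_backup.json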
cd /home/myusername/scripts
nano portainer-backup.sh
#!/bin/bash
# Set the Portainer container name
CONTAINER_NAME="portainer"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/portainer"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the Portainer data directory from the container to the backup directory
docker cp "$CONTAINER_NAME":/data "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x portainer-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/portainer-backup.sh
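Each run leaves a timestamped .tar.gz in the backup directory. Restoring is roughly the reverse: extract the archive and copy the data back into the container. A minimal sketch (the timestamp is an example, and the container should be stopped while its data is overwritten):
tar -xzf /home/myusername/backups/portainer/2024-01-01_00-00-00.tar.gz -C /tmp
docker stop portainer
docker cp /tmp/2024-01-01_00-00-00/data/. portainer:/data
docker start portainer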
cd /home/myusername/scripts
nano dashy-backup.sh
#!/bin/bash
# Set the Dashy container name
CONTAINER_NAME="dashy"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/dashy"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the Dashy data from the container to the backup directory
docker cp "$CONTAINER_NAME":/app/public/conf.yml "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/app/public/item-icons/icons "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x dashy-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/dashy-backup.sh
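Cron discards the script's output unless you capture it. If you want a record of each run, the same entry can append stdout and stderr to a log file (the log path is just an example):
0 0 * * * /home/myusername/scripts/dashy-backup.sh >> /home/myusername/backups/dashy-backup.log 2>&1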
cd /home/myusername/scripts
nano hpage-backup.sh
#!/bin/bash
# Set the hpage container name
CONTAINER_NAME="hpage"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/hpage"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the hpage data from the container to the backup directory
docker cp "$CONTAINER_NAME":/app/config "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/app/public/icons "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/app/public/images "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x hpage-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/hpage-backup.sh
cd /home/myusername/scripts
nano homarr-backup.sh
#!/bin/bash
# Set the homarr container name
CONTAINER_NAME="homarr"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/homarr"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the homarr data from the container to the backup directory
docker cp "$CONTAINER_NAME":/app/data/configs "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/app/public/icons "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x homarr-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/homarr-backup.sh
cd /home/myusername/scripts
nano nginxpm-backup.sh
#!/bin/bash
# Set the nginxpm container name
CONTAINER_NAME="nginxpm"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/nginxpm"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the nginxpm data from the container to the backup directory
docker cp "$CONTAINER_NAME":/data "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/etc/letsencrypt "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x nginxpm-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/nginxpm-backup.sh
cd /home/myusername/scripts
nano navidrome-backup.sh
#!/bin/bash
# Set the navidrome container name
CONTAINER_NAME="navidrome"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/navidrome"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the navidrome data from the container to the backup directory
docker cp "$CONTAINER_NAME":/data "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x navidrome-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/navidrome-backup.sh
cd /home/myusername/scripts
nano snapdrop-backup.sh
#!/bin/bash
# Set the snapdrop container name
CONTAINER_NAME="snapdrop"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/snapdrop"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the snapdrop data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x snapdrop-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/snapdrop-backup.sh
cd /home/myusername/scripts
nano homeassistant-backup.sh
#!/bin/bash
# Set the homeassistant container name
CONTAINER_NAME="homeassistant"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/homeassistant"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the homeassistant data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x homeassistant-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/homeassistant-backup.sh
cd /home/myusername/scripts
nano jellyfin-backup.sh
#!/bin/bash
# Set the jellyfin container name
CONTAINER_NAME="jellyfin"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/jellyfin"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the jellyfin data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x jellyfin-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/jellyfin-backup.sh
cd /home/myusername/scripts
nano jellyseerr-backup.sh
#!/bin/bash
# Set the jellyseerr container name
CONTAINER_NAME="jellyseerr"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/jellyseerr"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the jellyseerr data from the container to the backup directory
docker cp "$CONTAINER_NAME":/app/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x jellyseerr-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/jellyseerr-backup.sh
cd /home/myusername/scripts
nano radarr-backup.sh
#!/bin/bash
# Set the radarr container name
CONTAINER_NAME="radarr"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/radarr"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the radarr data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x radarr-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/radarr-backup.sh
cd /home/myusername/scripts
nano sonarr-backup.sh
#!/bin/bash
# Set the sonarr container name
CONTAINER_NAME="sonarr"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/sonarr"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the sonarr data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x sonarr-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/sonarr-backup.sh
cd /home/myusername/scripts
nano jackett-backup.sh
#!/bin/bash
# Set the jackett container name
CONTAINER_NAME="jackett"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/jackett"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the jackett data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x jackett-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/jackett-backup.sh
cd /home/myusername/scripts
nano qbittorrent-backup.sh
#!/bin/bash
# Set the qbittorrent container name
CONTAINER_NAME="qbittorrent"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/qbittorrent"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the qbittorrent data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x qbittorrent-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/qbittorrent-backup.sh
cd /home/myusername/scripts
nano unbound-backup.sh
#!/bin/bash
# Set the unbound container name
CONTAINER_NAME="unbound"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/unbound"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the unbound data from the container to the backup directory
docker cp "$CONTAINER_NAME":/opt/unbound/etc/unbound "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x unbound-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/unbound-backup.sh
cd /home/myusername/scripts
nano pihole-backup.sh
#!/bin/bash
# Set the pihole container name
CONTAINER_NAME="pihole"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/pihole"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the pihole data from the container to the backup directory
docker cp "$CONTAINER_NAME":/etc/pihole "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/etc/dnsmasq.d "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/etc/lighttpd "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/var/www/html/pihole "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x pihole-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/pihole-backup.sh
cd /home/myusername/scripts
nano hastebin-backup.sh
#!/bin/bash
# Set the hastebin container name
CONTAINER_NAME="hastebin"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/hastebin"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the hastebin data from the container to the backup directory
docker cp "$CONTAINER_NAME":/app/data "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x hastebin-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/hastebin-backup.sh
cd /home/myusername/scripts
nano nextcloud-backup.sh
#!/bin/bash
# Set the nextcloud container name and database container name
CONTAINER_NAME="nextcloud"
DB_CONTAINER_NAME="nextcloud-mariadb"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/nextcloud"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the nextcloud directory and database from the container to the backup directory
docker cp "$CONTAINER_NAME":/var/www/html "$BACKUP_PATH"
docker exec "$DB_CONTAINER_NAME" sh -c 'exec mysqldump --all-databases -uroot -p"$MYSQL_ROOT_PASSWORD"' > "$BACKUP_PATH/nextcloud-db.sql"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x nextcloud-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/nextcloud-backup.sh
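Nextcloud keeps writing to both the files and the database while it is online, so the copy and the dump can drift apart. If you want a consistent snapshot, you can wrap the copy/dump steps of the script in Nextcloud's maintenance mode; a sketch, assuming the container name above and the occ tool shipped in the official image:
docker exec -u www-data nextcloud php occ maintenance:mode --on
# ... run the docker cp and mysqldump steps from the script above ...
docker exec -u www-data nextcloud php occ maintenance:mode --off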
cd /home/myusername/scripts
nano duckdns-backup.sh
#!/bin/bash
# Set the duckdns container name
CONTAINER_NAME="duckdns"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/duckdns"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the duckdns data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x duckdns-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/duckdns-backup.sh
cd /home/myusername/scripts
nano vaultwarden-backup.sh
#!/bin/bash
# Set the vaultwarden container name
CONTAINER_NAME="vaultwarden"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/vaultwarden"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the vaultwarden data from the container to the backup directory
docker cp "$CONTAINER_NAME":/data "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x vaultwarden-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/vaultwarden-backup.sh
cd /home/myusername/scripts
nano rocketchat-backup.sh
#!/bin/bash
# Set the rocketchat container name and database container name
CONTAINER_NAME="rocketchat"
DB_CONTAINER_NAME="rocketchat-mongo"
DATABASE_NAME="rocketchat-db"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/rocketchat"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the rocketchat directory and database from the container to the backup directory
docker cp "$CONTAINER_NAME":/var/www/html "$BACKUP_PATH"
docker exec "$CONTAINER_NAME"_mongo mongodump --out "$BACKUP_PATH" --db "$DATABASE_NAME"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x rocketchat-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/rocketchat-backup.sh
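To bring that archive back, mongorestore can read it from stdin inside the Mongo container. A sketch, assuming the rocketchat-db.archive file extracted from one of the backups (--drop replaces existing collections):
docker exec -i rocketchat-mongo mongorestore --archive --drop < /path/to/rocketchat-db.archive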
cd /home/myusername/scripts
nano synapse-backup.sh
#!/bin/bash
# Set the synapse container name
CONTAINER_NAME="synapse"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/synapse"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the synapse data from the container to the backup directory
docker cp "$CONTAINER_NAME":/data "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x synapse-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/synapse-backup.sh
cd /home/myusername/scripts
nano uptimekuma-backup.sh
#!/bin/bash
# Set the uptimekuma container name
CONTAINER_NAME="uptimekuma"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/uptimekuma"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the uptimekuma data from the container to the backup directory
docker cp "$CONTAINER_NAME":/app/data "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x uptimekuma-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/uptimekuma-backup.sh
cd /home/myusername/scripts
nano wireguard-backup.sh
#!/bin/bash
# Set the wireguard container name
CONTAINER_NAME="wireguard"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/wireguard"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the wireguard data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x wireguard-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/wireguard-backup.sh
cd /home/myusername/scripts
nano seafile-backup.sh
#!/bin/bash
# Set the seafile container name and database container name
CONTAINER_NAME="seafile"
DB_CONTAINER_NAME="seafile-mariadb"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/seafile"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the seafile directory and database from the container to the backup directory
docker cp "$CONTAINER_NAME":/shared "$BACKUP_PATH"
docker exec "$DB_CONTAINER_NAME" sh -c 'exec mysqldump --all-databases -uroot -p"$MYSQL_ROOT_PASSWORD"' > "$BACKUP_PATH/seafile-db.sql"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x seafile-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/seafile-backup.sh
cd /home/myusername/scripts
nano grafana-backup.sh
#!/bin/bash
# Set the grafana container name
CONTAINER_NAME="grafana"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/grafana"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the grafana data from the container to the backup directory
docker cp "$CONTAINER_NAME":/var/lib/grafana "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x grafana-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/grafana-backup.sh
cd /home/myusername/scripts
nano prometheus-backup.sh
#!/bin/bash
# Set the prometheus container name
CONTAINER_NAME="prometheus"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/prometheus"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the prometheus data from the container to the backup directory
docker cp "$CONTAINER_NAME":/prometheus "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x prometheus-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/prometheus-backup.sh
cd /home/myusername/scripts
nano pufferpanel-backup.sh
#!/bin/bash
# Set the pufferpanel container name
CONTAINER_NAME="pufferpanel"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/pufferpanel"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the pufferpanel data from the container to the backup directory
docker cp "$CONTAINER_NAME":/etc/pufferpanel "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/var/lib/pufferpanel "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x pufferpanel-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/pufferpanel-backup.sh
cd /home/myusername/scripts
nano authelia-backup.sh
#!/bin/bash
# Set the authelia container name and database container name
CONTAINER_NAME="authelia"
DB_CONTAINER_NAME="authelia-redis"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/authelia"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the authelia directory and database from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
docker cp "$DB_CONTAINER_NAME":/data "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x authelia-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/authelia-backup.sh
cd /home/myusername/scripts
nano website01-nginx-backup.sh
#!/bin/bash
# Set the website01-nginx container name
CONTAINER_NAME="website01-nginx"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/website01-nginx"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the website01-nginx data from the container to the backup directory
docker cp "$CONTAINER_NAME":/usr/share/nginx/html "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/etc/nginx/nginx.conf "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x website01-nginx-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/website01-nginx-backup.sh
cd /home/myusername/scripts
nano wordpress01-backup.sh
#!/bin/bash
# Set the wordpress01 container name
CONTAINER_NAME="wordpress01"
DB_CONTAINER_NAME="wordpress01-mysql"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/wordpress01"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the wordpress01 data and database from the containers to the backup directory
docker cp "$CONTAINER_NAME":/var/www/html "$BACKUP_PATH"
docker exec "$DB_CONTAINER_NAME" mysqldump -u "$MYSQL_USER" -p "$MYSQL_PASSWORD" "$MYSQL_DATABASE" > "$BACKUP_PATH/wordpress01-db.sql"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x wordpress01-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/wordpress01-backup.sh
cd /home/myusername/scripts
nano webtop-backup.sh
#!/bin/bash
# Set the webtop container name
CONTAINER_NAME="webtop"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/webtop"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the webtop data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x webtop-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/webtop-backup.sh
cd /home/myusername/scripts
nano mango-backup.sh
#!/bin/bash
# Set the mango container name
CONTAINER_NAME="mango"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/mango"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the mango data from the container to the backup directory
docker cp "$CONTAINER_NAME":/root/mango "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/root/.config/mango "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x mango-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/mango-backup.sh
cd /home/myusername/scripts
nano filebrowser-backup.sh
#!/bin/bash
# Set the filebrowser container name
CONTAINER_NAME="filebrowser"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/filebrowser"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the filebrowser data from the container to the backup directory
docker cp "$CONTAINER_NAME":/srv "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/database.db "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/filebrowser.json "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x filebrowser-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/filebrowser-backup.sh
cd /home/myusername/scripts
nano uvdesk-backup.sh
#!/bin/bash
# Set the uvdesk container name
CONTAINER_NAME="uvdesk"
DB_CONTAINER_NAME="uvdesk-mysql"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/uvdesk"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the uvdesk data and database from the containers to the backup directory
docker cp "$CONTAINER_NAME":/usr/share/nginx/html "$BACKUP_PATH"
docker exec "$DB_CONTAINER_NAME" sh -c 'exec mysqldump --all-databases -uroot -p "$MYSQL_ROOT_PASSWORD"' > "$BACKUP_PATH/uvdesk-db.sql"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x uvdesk-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/uvdesk-backup.sh
cd /home/myusername/scripts
nano glpi-backup.sh
#!/bin/bash
# Set the glpi container name
CONTAINER_NAME="glpi"
DB_CONTAINER_NAME="glpi-mariadb"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/glpi"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the glpi data and database from the containers to the backup directory
docker cp "$CONTAINER_NAME":/var/www/html/glpi "$BACKUP_PATH"
docker exec "$DB_CONTAINER_NAME" sh -c 'exec mysqldump --all-databases -uroot -p"$MYSQL_ROOT_PASSWORD"' > "$BACKUP_PATH/glpi-db.sql"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x glpi-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/glpi-backup.sh
cd /home/myusername/scripts
nano matomo-backup.sh
#!/bin/bash
# Set the matomo container name
CONTAINER_NAME="matomo-app"
WEB_CONTAINER_NAME="matomo-web"
DB_CONTAINER_NAME="matomo-mariadb"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/matomo"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the matomo data and database from the containers to the backup directory
docker cp "$CONTAINER_NAME":/var/www/html "$BACKUP_PATH"
docker exec "$DB_CONTAINER_NAME" sh -c 'exec mysqldump --all-databases -uroot -p"$MYSQL_ROOT_PASSWORD"' > "$BACKUP_PATH/matomo-db.sql"
docker cp "$WEB_CONTAINER_NAME":/etc/nginx/conf.d/default.conf "$BACKUP_PATH"
docker cp "$WEB_CONTAINER_NAME":/var/www/html "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x matomo-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/matomo-backup.sh
cd /home/myusername/scripts
nano fail2ban-backup.sh
#!/bin/bash
# Set the fail2ban container name
CONTAINER_NAME="fail2ban"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/fail2ban"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the fail2ban data from the container to the backup directory
docker cp "$CONTAINER_NAME":/data "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/log/npm/ "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/var/log/auth.log "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x fail2ban-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/fail2ban-backup.sh
cd /home/myusername/scripts
nano wikijs-backup.sh
#!/bin/bash
# Set the wikijs container name
CONTAINER_NAME="wikijs"
DB_CONTAINER_NAME="wikijs-postgres"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/wikijs"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Dump the wikijs database from the container to the backup directory
docker exec "$DB_CONTAINER_NAME" pg_dump -U "$POSTGRES_USER" -d "$POSTGRES_DB" -Fc -W "$POSTGRES_PASSWORD" > "$BACKUP_PATH/wiki-db.dump"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x wikijs-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/wikijs-backup.sh
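The -Fc dump is in PostgreSQL's custom format, so it is restored with pg_restore rather than psql. A sketch, assuming the container and environment variables used above and a dump file extracted from one of the backups:
docker exec -i wikijs-postgres sh -c 'exec pg_restore -U "$POSTGRES_USER" -d "$POSTGRES_DB" --clean' < /path/to/wiki-db.dump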
cd /home/myusername/scripts
nano openproject-backup.sh
#!/bin/bash
# Set the openproject container name
CONTAINER_NAME="openproject"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/openproject"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the openproject data from the container to the backup directory
docker cp "$CONTAINER_NAME":/var/openproject/pgdata "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/var/openproject/assets "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x openproject-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/openproject-backup.sh
cd /home/myusername/scripts
nano dolibarr-backup.sh
#!/bin/bash
# Set the dolibarr container name
CONTAINER_NAME="dolibarr"
DB_CONTAINER_NAME="dolibarr-mariadb"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/dolibarr"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the dolibarr data and database from the containers to the backup directory
docker cp "$CONTAINER_NAME":/var/www/documents "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/var/www/html/custom "$BACKUP_PATH"
docker exec "$DB_CONTAINER_NAME" sh -c 'exec mysqldump --all-databases -uroot -p"$MYSQL_ROOT_PASSWORD"' > "$BACKUP_PATH/dolibarr-db.sql"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x dolibarr-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/dolibarr-backup.sh
cd /home/myusername/scripts
nano humhub-backup.sh
#!/bin/bash
# Set the humhub container name
CONTAINER_NAME="humhub"
DB_CONTAINER_NAME="humhub-mariadb"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/humhub"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the humhub data and database from the containers to the backup directory
docker cp "$CONTAINER_NAME":/var/www/localhost/htdocs/protected/config "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/var/www/localhost/htdocs/uploads "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/var/www/localhost/htdocs/protected/config "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/var/www/localhost/htdocs/protected/modules "$BACKUP_PATH"
docker exec "$DB_CONTAINER_NAME" sh -c 'exec mysqldump --all-databases -uroot -p"$MYSQL_ROOT_PASSWORD"' > "$BACKUP_PATH/humhub-db.sql"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x humhub-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/humhub-backup.sh
cd /home/myusername/scripts
nano guacamole-backup.sh
#!/bin/bash
# Set the guacamole container name
CONTAINER_NAME="guacamole"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/guacamole"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the guacamole data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x guacamole-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/guacamole-backup.sh
cd /home/myusername/scripts
nano rustdesk-backup.sh
#!/bin/bash
# Set the rustdesk container name
CONTAINER_NAME="rustdesk-hbbs"
HBBR_CONTAINER_NAME="rustdesk-hbbr"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/rustdesk"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the rustdesk data from the containers to the backup directory
docker cp "$CONTAINER_NAME":/root "$BACKUP_PATH"
docker cp "$HBBR_CONTAINER_NAME":/root "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x rustdesk-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/rustdesk-backup.sh
cd /home/myusername/scripts
nano remotely-backup.sh
#!/bin/bash
# Set the remotely container name
CONTAINER_NAME="remotely"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/remotely"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the remotely data from the container to the backup directory
docker cp "$CONTAINER_NAME":/remotely-data "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x remotely-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/remotely-backup.sh
cd /home/myusername/scripts
nano pwndrop-backup.sh
#!/bin/bash
# Set the pwndrop container name
CONTAINER_NAME="pwndrop"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/pwndrop"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the pwndrop data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x pwndrop-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/pwndrop-backup.sh
cd /home/myusername/scripts
nano ipboard-backup.sh
#!/bin/bash
# Set the ipboard container name
CONTAINER_NAME="ipboard"
DB_CONTAINER_NAME="ipboard-mariadb"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/ipboard"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the ipboard data and database from the containers to the backup directory
docker cp "$CONTAINER_NAME":/app "$BACKUP_PATH"
docker exec "$DB_CONTAINER_NAME" sh -c 'exec mysqldump --all-databases -uroot -p"$MYSQL_ROOT_PASSWORD"' > "$BACKUP_PATH/humhub-db.sql"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x ipboard-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/ipboard-backup.sh
cd /home/myusername/scripts
nano teleport-backup.sh
#!/bin/bash
# Set the teleport container name
CONTAINER_NAME="teleport"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/teleport"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the teleport data from the container to the backup directory
docker cp "$CONTAINER_NAME":/etc/teleport "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/var/lib/teleport "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x teleport-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/teleport-backup.sh
cd /home/myusername/scripts
nano upsnap-backup.sh
#!/bin/bash
# Set the upsnap container name
CONTAINER_NAME="upsnap"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/upsnap"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the upsnap data from the container to the backup directory
docker cp "$CONTAINER_NAME":/app/backend/db/ "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x upsnap-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/upsnap-backup.sh
cd /home/myusername/scripts
nano kasm-backup.sh
#!/bin/bash
# Set the kasm container name
CONTAINER_NAME="kasm"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/kasm"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the kasm data from the container to the backup directory
docker cp "$CONTAINER_NAME":/opt "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/profiles "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x kasm-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/kasm-backup.sh
cd /home/myusername/scripts
nano ispy-backup.sh
#!/bin/bash
# Set the ispy container name
CONTAINER_NAME="ispy"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/ispy"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the ispy data from the container to the backup directory
docker cp "$CONTAINER_NAME":/agent/Media/XML "$BACKUP_PATH"
docker cp "$CONTAINER_NAME":/agent/Commands "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x ispy-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/ispy-backup.sh
cd /home/myusername/scripts
nano unifi-controller-backup.sh
#!/bin/bash
# Set the unifi-controller container name
CONTAINER_NAME="unifi-controller"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/unifi-controller"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the unifi-controller data from the container to the backup directory
docker cp "$CONTAINER_NAME":/config "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x unifi-controller-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/unifi-controller-backup.sh
cd /home/myusername/scripts
nano gitea-backup.sh
#!/bin/bash
# Set the gitea container name
CONTAINER_NAME="gitea"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/gitea"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the gitea data from the container to the backup directory
docker cp "$CONTAINER_NAME":/data "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x gitea-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/gitea-backup.sh
cd /home/myusername/scripts
nano gitlab-backup.sh
#!/bin/bash
# Set the gitlab container name
CONTAINER_NAME="gitlab-runner"
WEB_CONTAINER_NAME="gitlab-ce"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/gitlab"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the gitlab data from the containers to the backup directory
docker cp "$CONTAINER_NAME":/etc/gitlab-runner "$BACKUP_PATH"
docker cp "$WEB_CONTAINER_NAME":/etc/gitlab "$BACKUP_PATH"
docker cp "$WEB_CONTAINER_NAME":/var/log/gitlab "$BACKUP_PATH"
docker cp "$WEB_CONTAINER_NAME":/var/opt/gitlab "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x gitlab-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/gitlab-backup.sh
cd /home/myusername/scripts
nano changedetection-backup.sh
#!/bin/bash
# Set the changedetection container name
CONTAINER_NAME="changedetection"
# Set the backup directory
BACKUP_DIR="/home/myusername/backups/changedetection"
# Create a new directory for the backup
BACKUP_DATE=$(date +"%Y-%m-%d_%H-%M-%S")
BACKUP_PATH="$BACKUP_DIR/$BACKUP_DATE"
mkdir -p "$BACKUP_PATH"
# Copy the changedetection data from the container to the backup directory
docker cp "$CONTAINER_NAME":/datastore "$BACKUP_PATH"
# Compress the backup directory
tar -czf "$BACKUP_PATH.tar.gz" -C "$BACKUP_DIR" "$BACKUP_DATE"
# Remove the uncompressed backup directory
rm -rf "$BACKUP_PATH"
# Prune old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -type f -mtime +7 -delete
sudo chmod +x changedetection-backup.sh
sudo crontab -e
# This cron job runs at midnight every day (0 0 * * *)
# and executes the script located at /home/myusername/scripts
0 0 * * * /home/myusername/scripts/changedetection-backup.sh