Commit 050ef956 authored by Nguyen Hung Manh's avatar Nguyen Hung Manh
Browse files

Feature/backup and restore

parent c1f0630a
data/db
!data/wp/CaddyFile
data/wp/
data/restore/*
backup/data
#!/bin/bash
#
# WordPress backup driver: mirrors plugins and uploads, dumps MariaDB,
# compresses the results and pushes the archives to Dropbox via the
# hunnguye/dropbox_loader container.
#
# folder structure for backup:
#   wp_backup -- mysql   -- mysql-dump-datetime
#             -- plugins -- backup/
#                        -- backup.tar.gz
#                        -- hash.backup
#                        -- hash.upload
#             -- uploads -- 2019 -- 01 -- backup
#                                     -- backup.tar.gz
#                                     -- SUCCESS.backup
#                                     -- SUCCESS.upload
#                              -- 02 -- backup/
#                                    -- backup.tar.gz
#                                    -- backup
#                                    -- upload
# source = wp instance
# target = backup directory
# remote = directory in dropbox
# stops immediately when one command fails
# https://stackoverflow.com/questions/19622198/what-does-set-e-mean-in-a-bash-script
# NOTE(review): without 'set -E' (errtrace) this ERR trap only fires for
# top-level commands, not for failures inside the functions below - confirm
# whether function-level coverage was intended before enabling it.
trap 'write_log "some error occurred exiting now" "ERROR" ; exit' ERR
# pipe-delimited list of applications to back up (first line is a header)
backup_config="/home/pi/docker/dockerized-wordpress-on-raspberry-pi/backup/config/backup.conf"
# set to 1 to let the debug() helper print to the console
debug=1
##############################
## logging function
##############################
function write_log {
  # Append a timestamped line to ${log_dir}/log.
  # $1 - message text
  # $2 - optional log level (any case); defaults to INFO
  local content=$1
  local type="INFO"
  if [[ $# -eq 2 ]]; then
    # normalise the level to upper case; character classes must be quoted
    # so they are not expanded as globs in the working directory
    type="$( echo "$2" | tr '[:lower:]' '[:upper:]' )"
  fi
  # use the captured $content (the original echoed $1 and left the
  # declared local unused) and quote the log path against spaces
  echo "[$type] [$( date "+%Y%m%d %T" )] $content" >> "${log_dir}/log"
}
##############################
## debug log
#############################
function debug {
  # Print "[TAG] message" to stdout when the global $debug flag is 1.
  # $1 - message text
  # $2 - optional tag; defaults to DEBUG (the original printed an empty
  #      "[]" when the tag was omitted)
  local tag=${2:-DEBUG}
  if [[ $debug -eq 1 ]]; then
    echo "[$tag] $1"
  fi
}
##############################
## set directory
##############################
function set_parameters {
  # Parse one pipe-delimited config line and populate the global paths
  # used by the backup functions, then create the local backup skeleton.
  # Expected format (quotes optional):
  # "Application"|"config_dir"|"source_root_dir"|"backup_root_dir"|"remote_root_dir"
  # strip all quotes once instead of running tr per field
  local input_string=${1//\"/}
  # one read replaces five echo|tr|cut pipelines
  IFS='|' read -r application config_dir source_root_dir backup_root_dir remote_root_dir <<< "$input_string"
  dropbox_config_dir=$config_dir/dropbox_conf
  # wp root directory www
  source_upload_dir="$source_root_dir"/wp-content/uploads
  # local backup root directory
  target_plugin_backup_dir="$backup_root_dir"/$application/plugins
  target_mysql_backup_dir="$backup_root_dir"/mysql
  target_uploads_backup_dir="$backup_root_dir"/$application/uploads
  log_dir="$backup_root_dir"/logs
  # creating backup structure (quoted: paths may contain spaces)
  mkdir -p "$target_plugin_backup_dir" "$target_mysql_backup_dir" "$target_uploads_backup_dir" "$log_dir"
}
##############################
## building dev env
##############################
function build_dev {
  # Populate a fake uploads tree (years/months with dummy files) so the
  # backup logic can be exercised without a real WordPress instance.
  local year month letter dir
  for year in {2018..2019}; do
    for month in {01..12}; do
      dir=$source_upload_dir/$year/$month
      mkdir -p "$dir"
      for letter in {a..g}; do
        touch "$dir/file_$letter"
        echo "touched $dir/file_$letter "
      done
    done
  done
}
##############################
## check directory
##############################
function check_directory {
  # Validate that the paths produced by set_parameters actually exist.
  # Logs an ERROR and exits the whole script on the first missing item.
  if [[ ! -d "$source_root_dir" ]]; then
    write_log "Wordpress root directory wrong" "ERROR"
    exit 1
  fi
  if [[ ! -d "$backup_root_dir" ]]; then
    write_log "Backup root directory wrong" "ERROR"
    exit 1
  fi
  # quoted: the original unquoted path broke for unset/space-containing
  # config_dir values
  if [[ ! -f "${config_dir}/mysql_credentials" ]]; then
    write_log "No mysql_credentials config file" "ERROR"
    exit 1
  fi
}
##############################
## upload helper
##############################
function uploader {
  # Upload one backup archive to Dropbox via the dropbox_loader container,
  # then verify the file appears in the remote listing; exits on failure.
  # $1 - hash of the last uploaded state ('' means first-ever upload)
  # $2 - local backup dir mounted into the container as /wp_backup
  # $3 - sub-path schema below the backup dir (may be empty)
  # $4 - archive filename to upload
  # $5 - remote (Dropbox) target directory
  # $6 - log type; deprecated, kept only for call-site compatibility
  local hash_upload=$1
  local backup_dir=$2
  local backup_schema=$3
  local backup_filename=$4
  local remote_dir=$5
  local log_type=$6
  # '[[ -z ]]' replaces the double-negative '[[ ! -n ]]'
  if [[ -z $hash_upload ]]; then
    write_log "${backup_schema} Initial backup "
  else
    write_log "${backup_schema} Changes detected"
  fi
  write_log "${backup_schema} Start uploading file $backup_filename "
  # evtl todo check for existing hash, else rm hash, upload both files
  write_log "Using config in ${dropbox_config_dir}, mounting ${backup_dir}, using remote structure: $remote_dir/$backup_schema"
  docker run -i --rm -v "${dropbox_config_dir}":/config \
    -v "${backup_dir}":/wp_backup/ \
    hunnguye/dropbox_loader:1.0 upload \
    "/wp_backup/${backup_schema}/${backup_filename}" \
    "$remote_dir/$backup_schema/"
  # grep -q in the condition instead of capturing the listing into a string
  if docker run -i --rm -v "${dropbox_config_dir}":/config hunnguye/dropbox_loader:1.0 list "$remote_dir/$backup_schema" | grep -q -- "${backup_filename}"; then
    write_log "${backup_schema} upload successful"
  else
    write_log "${backup_schema} upload not successful" "ERROR"
    exit 1
  fi
}
##############################
## backup mariadb
##############################
function backup_mariadb {
  # Dump all databases from the running MariaDB container into
  # $target_mysql_backup_dir, compress the dump, and upload the archive
  # to the remote mysql folder. Exits if no matching container is found.
  write_log "########### INITIATE BACKUP MARIADB ###########"
  # resolve the container by name via --format instead of the fragile
  # 'docker ps | sed | cut -f7' column parsing
  local mysql_container
  mysql_container=$( docker ps --format '{{.Names}}' | grep webservice_mariadb )
  local sql_backupfilename
  sql_backupfilename=$( date +%Y%m%d )_mysqldumb_backup.sql
  local remote_mysql_backup_dir="$remote_root_dir"/mysql
  # read credentials from the env-style file (KEY=VALUE lines)
  local mysql_user mysql_pwd
  mysql_user=$( grep MYSQL_USER "${config_dir}/mysql_credentials" | cut -d '=' -f2 )
  mysql_pwd=$( grep MYSQL_PASSWORD "${config_dir}/mysql_credentials" | cut -d '=' -f2 )
  if [[ -z "$mysql_container" ]]; then
    write_log "no container found with mariadb in name" "ERROR"
    exit 1
  else
    write_log "found container $mysql_container"
  fi
  # now in backup/mysql
  cd "$target_mysql_backup_dir" || exit 1
  # ToDo check whether credentials are filled
  # extract mysql db to host backup directory
  docker exec "$mysql_container" /usr/bin/mysqldump --user="${mysql_user}" --password="${mysql_pwd}" --all-databases > "$sql_backupfilename"
  write_log "backup done to $target_mysql_backup_dir/${sql_backupfilename}"
  # compress mysql data
  local sql_backupfilename_compressed=${sql_backupfilename}.tar.gz
  tar -czvf "${sql_backupfilename_compressed}" "${sql_backupfilename}"
  local orig_size compressed_size
  orig_size=$( du -sh "${sql_backupfilename}" | cut -f1 )
  compressed_size=$( du -sh "${sql_backupfilename_compressed}" | cut -f1 )
  write_log "Compressed file to ${sql_backupfilename_compressed} original size: ${orig_size}, compressed size: ${compressed_size} "
  # upload (6th arg kept for call-site compatibility; unused by uploader)
  uploader '' \
    "$target_mysql_backup_dir" \
    '' \
    "$sql_backupfilename.tar.gz" \
    "$remote_mysql_backup_dir" \
    "MARIADB"
  write_log "########### FINISHED BACKUP MARIADB ###########"
}
##############################
## backup plugins
##############################
function backup_plugins {
  # Back up wp-content/plugins for the current application: rsync to a
  # local mirror, compress it, and upload only when the hash changed
  # since the last successful upload.
  write_log "########### INITIATE BACKUP PLUGINS $application ###########"
  cd "$target_plugin_backup_dir" || exit 1
  local remote_plugin_backup_dir="$remote_root_dir"/$application/plugins
  local source_plugin_dir="$source_root_dir"/wp-content/plugins/
  write_log "Local backup plugins from $source_plugin_dir to $target_plugin_backup_dir/backup"
  rsync -a --delete "$source_plugin_dir" backup/
  # NOTE(review): the hash is computed over tar's -v file listing, so it
  # changes when files are added/removed/renamed but NOT when only file
  # contents change - confirm this is the intended change detection.
  tar -czvf plugins_backup.tar.gz -C backup/ . | sha1sum | cut -d ' ' -f1 > "hash.backup"
  # setting hashes; hash.upload does not exist on the very first run, so
  # default to empty instead of letting 'cat' fail (a failing command
  # substitution here would trip the script's ERR trap)
  local hash_backup hash_upload
  hash_backup=$(cat "hash.backup")
  hash_upload=$(cat "hash.upload" 2>/dev/null || true)
  if [[ "$hash_backup" != "$hash_upload" ]]; then
    # func: uploader hash_upload, backup_dir, schema, backup_filename, remote_dir
    uploader "$hash_upload" \
      "$target_plugin_backup_dir" \
      "" \
      "plugins_backup.tar.gz" \
      "$remote_plugin_backup_dir"
    # remember what we uploaded so unchanged runs can be skipped
    echo "$hash_backup" > "hash.upload"
  else
    write_log "No changes detected, no upload needed"
  fi
  write_log "########### FINISHED BACKUP PLUGINS $application ###########"
}
##############################
## backup uploads
##############################
function backup_uploads {
# Back up wp-content/uploads month by month: each YYYY/MM source folder is
# rsynced into its own mirror under $target_uploads_backup_dir, tarred, and
# uploaded when the hash changed. Months that are fully in the past get a
# SUCCESS marker, are skipped on later runs, and their uncompressed mirror
# is removed. Relies on globals from set_parameters: source_upload_dir,
# target_uploads_backup_dir, remote_root_dir, application.
cd $target_uploads_backup_dir
local remote_upload_backup_dir="$remote_root_dir"/$application/uploads
write_log "########### INITIATE BACKUP UPLOADS $application ###########"
for years_folder in $source_upload_dir/*; do
years=$( basename "$years_folder")
# check if this is a folder containing numbers = upload folder or some other
if ! [[ $years =~ ^[0-9]+$ ]]; then
continue
fi
for months_folder in $source_upload_dir/$years/*; do
local months=$( basename "$months_folder" )
local backup_filename=${years}${months}_uploads_backup.tar.gz
local folder_schema="${years}/${months}"
cd $target_uploads_backup_dir
# creating path in backup directory
mkdir -p $years/$months/
cd $years/$months/
# checking for SUCCESS flag (written below once a month is closed;
# format: "<number_of_files>|<file_size>")
if [ -f SUCCESS ]; then
local number_of_files=$( cat SUCCESS | cut -d '|' -f1 )
local file_size=$( cat SUCCESS | cut -d '|' -f2 )
write_log "${folder_schema} Skipping folder - already succeeded. Files: $number_of_files | Size: $file_size "
continue
fi
# NOTE(review): 'ls backup/*' errors on the first run for a month (no
# backup dir yet) - count is then 0; confirm this stderr noise is acceptable
local number_of_files_before=$( ls backup/* | wc -l )
# syncing files to local backup
write_log "Local backup uploads from $source_upload_dir/$years/$months to $target_uploads_backup_dir/$years/$months"
rsync -a --delete $source_upload_dir/$years/$months backup
# NOTE(review): the hash covers tar's -v file listing (names), not file
# contents - content-only edits will not trigger a re-upload; confirm intended
tar -czvf $backup_filename -C backup/ . | sha1sum | cut -d ' ' -f1 > "hash.backup"
# setting hashes; hash.upload is missing until the first successful
# upload - cat then yields an empty string (plus stderr noise)
local hash_backup=$(cat "hash.backup")
local hash_upload=$(cat "hash.upload")
local number_of_files=$( ls backup/* | wc -l )
local file_size=$( du -sh backup | cut -f1 )
# checking changes of local and remote backup
if [[ $hash_backup != $hash_upload ]]; then
# func: uploader hash_upload, backup_dir, schema, backup_filename, remote_dir, log_type
uploader "$hash_upload" \
"$target_uploads_backup_dir" \
"${folder_schema}" \
"$backup_filename" \
"$remote_upload_backup_dir" \
# NOTE(review): the trailing backslash above continues onto this comment
# line, which ends the argument list - the deprecated log_type is never passed
echo "$hash_backup" > "hash.upload"
write_log "${folder_schema} files before sync: $number_of_files_before | files after sync: $number_of_files "
else
write_log "${folder_schema} No changes detected, no upload needed"
fi
# set SUCCESS flag if month is already over (numeric compare of YYYYMM)
if [[ $(date +%Y%m) -gt $years$months ]]; then
echo "$number_of_files|$file_size" > SUCCESS
write_log "${folder_schema} Closing folder with SUCCESS flag"
write_log "${folder_schema} Backed up Files: $number_of_files | Size: $file_size"
write_log "${folder_schema} Cleaning uncompressed files"
rm -r backup
fi
write_log "${folder_schema} Backup process finished"
done
done
write_log "########### FINISHED BACKUP UPLOADS $application ###########"
}
############################################################
## cleanup mysqldump
############################################################
function cleanup_mysqldumb {
  # Delete local and remote MariaDB dumps older than 30 days.
  # Dump files are named <YYYYMMDD>_mysqldumb_backup.sql[.tar.gz].
  local remote_mysql_backup_dir="$remote_root_dir"/mysql
  local last_date_to_remove
  # NOTE: 'date -d' relative dates are GNU-specific
  last_date_to_remove=$( date -d "-30 days" "+%Y%m%d" )
  cd "$target_mysql_backup_dir" || exit 1
  local entry file file_date
  for entry in *; do
    file=$( basename "$entry" )
    file_date=$( echo "$file" | cut -d'_' -f1 )
    # Skip anything not prefixed with an 8-digit date: the original
    # arithmetic '-le' comparison evaluated non-numeric prefixes to 0,
    # silently deleting unrelated files (and erroring on globs like '*').
    if ! [[ $file_date =~ ^[0-9]{8}$ ]]; then
      continue
    fi
    # the original compared against 'last_date_to_remove' without '$' -
    # it only worked via arithmetic variable resolution inside [[ ]]
    if [[ $file_date -le $last_date_to_remove ]]; then
      rm -- "$file"
      docker run -i --rm -v "${dropbox_config_dir}":/config hunnguye/dropbox_loader:1.0 delete "$remote_mysql_backup_dir"/"$file"
      write_log "Removed local and remote backup of file $file " "INFO"
    fi
  done
}
############################################################
## main: one backup pass per configured application, then the
## shared MariaDB dump and the 30-day dump cleanup.
############################################################
# Read config lines whole with 'read' instead of word-splitting the
# command substitution in a for-loop (which broke paths containing
# spaces); the first line of the config is a header and is skipped.
while IFS= read -r input_string; do
  [[ -n $input_string ]] || continue
  set_parameters "$input_string"
  check_directory
  #build_dev
  backup_plugins
  backup_uploads
done < <( tail -n+2 "$backup_config" | cut -d '|' -f1-5 )
backup_mariadb
cleanup_mysqldumb
"Application"|"config_dir"|"source_root_dir"|"backup_root_dir"|"remote_root_dir"
"app1"|"/home/pi/docker/dockerized-wordpress-on-raspberry-pi/backup/config"|"/home/pi/docker/dockerized-wordpress-on-raspberry-pi/data/wp/www"|"/home/pi/docker/dockerized-wordpress-on-raspberry-pi/backup/data"|"/wp_backup"
/home/pi/docker/dockerized-wordpress-on-raspberry-pi/data/env_variables/mysql_credentials
\ No newline at end of file
......@@ -34,6 +34,7 @@ services:
image: yobasystems/alpine-mariadb
volumes:
- "./data/db:/var/lib/mysql" # location where you persist DB files
- "./data/restore:/restore" # mount folder to store the mysql_dump file
restart: always
env_file:
- "./data/env_variables/mysql_credentials" # passing env variables to initiate a new DB
......
#!/bin/bash
# Bootstrap a WordPress instance: download the latest release into
# data/wp/www and drop in the preconfigured wp-config.php.
# abort on any failure so a broken download never looks like success
set -e
echo "##### Downloading latest Wordpress Version #####"
# ensure the target directory exists before extracting into it
mkdir -p data/wp/www
# https instead of http (the original fetched the tarball insecurely);
# -f makes curl fail on HTTP errors instead of piping an error page to tar
curl -fsL https://wordpress.org/latest.tar.gz | tar --strip 1 -xz -C data/wp/www
echo "##### Download finished, copy preconfigured wp-config.php into data/wp/www #####"
cp wp-config.php data/wp/www/
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment