Initial commit

leca 2023-02-04 06:33:08 +03:00
commit aa1403d8e3
5 changed files with 227 additions and 0 deletions

backup/autobackup.sh Executable file

@@ -0,0 +1,92 @@
#!/bin/bash
now=$(date +"%s")
backup_dir="/backup/$now"
if [ "$(whoami)" != root ]; then
    echo "You must run this script as root (e.g. via sudo)."
    exit 1
fi
# Print a timestamped log message
log() {
    echo "[$(date)] $*"
}
# Dump a PostgreSQL database into the backup directory
backup_db() {
    sudo -iu postgres pg_dump -Fc "$1" > "${backup_dir}/${1}.dump"
}
# Archive a directory; this function shares its name with the backup_dir
# variable (bash keeps functions and variables in separate namespaces)
backup_dir() {
    tar cpf "${backup_dir}/${1}.tar" "$2"
}
# Compare the md5 sum of a local backup file with its copy on the backup server
checksum() {
    client=$(md5sum "${backup_dir}/$1")
    clientSUM=${client%% *}
    server=$(sudo -iu foxbackup ssh -p2442 42.42.42.2 "md5sum /mnt/foxbackups/backup1/$now/$1")
    serverSUM=${server%% *}
    if [[ "$clientSUM" == "$serverSUM" ]]; then
        echo "[$(date)] Hash sums of $1 on the client and on the server are equal."
    else
        echo "[$(date)] WARNING!!! HASH SUMS OF $1 ON THE CLIENT AND ON THE SERVER ARE NOT EQUAL!"
        echo "WARNING!!! HASH SUMS OF $1 ON THE CLIENT AND ON THE SERVER ARE NOT EQUAL!" >> /etc/motd
    fi
}
startBackup() {
    #Start
    mkdir -p "${backup_dir}"
    log "Starting backup"
    #Backing up directories
    log "Backing up /opt..."
    backup_dir opt /opt
    log "/opt is backed up, starting to back up nextcloud..."
    backup_dir nextcloud /var/www/nextcloud
    log "Nextcloud is backed up, starting to back up game servers..."
    backup_dir servers /home/servers
    log "Servers are backed up, starting to back up the mailbox..."
    backup_dir mail /var/mail
    log "Mail is backed up, starting to back up peertube..."
    backup_dir peertube /var/www/peertube
    log "Peertube is backed up, starting to back up DBs..."
    #Backing up DBs
    log "Backing up synapse DB"
    backup_db synapse
    log "Synapse DB is backed up, backing up nextcloud DB..."
    backup_db nextclouddb
    log "Nextcloud DB is backed up, backing up pleroma DB..."
    backup_db pleroma
    log "Pleroma DB is backed up, backing up peertube DB..."
    backup_db peertube_prod
    log "Peertube DB is backed up, compiling all into one and sending to pomoika..."
    #Sending to backup server
    tar cvf - "${backup_dir}" | sudo -iu foxbackup ssh -p2442 foxbackup@42.42.42.2 "cd backup; tar xf -; /home/foxbackup/autobackup.sh"
    #Check md5 sums
    log "Done! All data is on pomoika! Starting to check the md5 sums..."
    log "Checking opt sum"
    checksum opt.tar
    log "Checking peertube sum"
    checksum peertube.tar
    log "Checking nextcloud sum"
    checksum nextcloud.tar
    log "Checking servers sum"
    checksum servers.tar
    log "Checking mail sum"
    checksum mail.tar
    log "Checking mail_home sum"
    #NOTE: mail_home.tar is not created by this script, so this check can only pass if it is produced elsewhere
    checksum mail_home.tar
    log "Checking synapse sum"
    checksum synapse.dump
    log "Checking nextclouddb sum"
    checksum nextclouddb.dump
    log "Checking pleroma sum"
    checksum pleroma.dump
    log "Checking peertube DB sum"
    checksum peertube_prod.dump
    log "Checked the md5 sums. See above for errors."
}
startBackup
### to restore
#
#pg_restore -d postgres --clean --create "<backup file>"
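#For example, a hedged sketch (assumes the dump is readable by the postgres user; <timestamp> is a placeholder):
#  sudo -iu postgres pg_restore -d postgres --clean --create "/backup/<timestamp>/nextclouddb.dump"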

backup/autobackup_daxik.sh Executable file

@@ -0,0 +1,54 @@
#!/bin/bash
#Autobackup script for mastodo.ru by leca@foxarmy.org
#The directory where all backups are stored
backup_dir="/backups/"
#Directories or files to make a backup of
directories=("/home/mastodon" "/var/lib/redis/dump.rdb")
#Empty array that will hold the archive names, e.g. /home/mastodon -> mastodon.tar
names=()
#PostgreSQL databases to make a back up of
databases=("mastodon_production")
#Unix timestamp used to name the backup directory in /backups
date=$(date +"%s")
#This program must be run with root privileges because it sometimes reads sensitive information and needs access to the database
if [ $UID != "0" ]; then
echo "You must run this script as root."
exit 1;
fi
#Creating necessary directories
mkdir -p "${backup_dir}${date}"
#The main body of the backup
#Loop that extracts the base name of each directory (e.g. /home/mastodon -> mastodon)
for name in "${directories[@]}"; do
    IFS="/"
    temp="${name}"
    read -ra splitIFS <<< "${temp}"
    len=${#splitIFS[@]}
    names+=("${splitIFS[len-1]}")
    IFS=''
done
#Loop that goes through the directories and backs each of them up as <name>.tar
length=${#directories[@]}
i=0
while [ $i -lt $length ]; do
    tar cf "${backup_dir}${date}/${names[i]}.tar" "${directories[i]}"
    i=$(( i + 1 ))
done
#Loop that goes through the databases and backs them up
for db in "${databases[@]}"; do
    sudo -iu postgres pg_dump -Fc "${db}" > "${backup_dir}${date}/${db}.dump"
done
#Executing the script that manages (rotates) backups; it is expected to exist as backup_manager.sh inside ${backup_dir}
cd "${backup_dir}"
./backup_manager.sh
#NOTE: to restore a postgresql database from its dump file: pg_restore -d postgres --clean --create "backup.dump"
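#Scheduling sketch (an assumption, not something this commit sets up): a nightly root cron entry such as
#  0 3 * * * /root/autobackup_daxik.sh >> /var/log/autobackup_daxik.log 2>&1
#would produce one timestamped directory under /backups/ per run.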

backup/backup.sh Executable file

@@ -0,0 +1,62 @@
#!/bin/bash
now=$(date +"%s")
backup_dir="/backup/$now"
# functions
log() {
    echo "[$(date)] $*"
}
backup_pg_db() {
    sudo -iu postgres pg_dump -Fc "$1" > "${backup_dir}/${1}.dump"
}
backup_dir() {
    tar cpf "${backup_dir}/${1}.tar" "$2"
}
# main
if [ $UID != "0" ]; then
echo "You must run it as root."
exit 1
fi
echo -n "Start backup? (y/n) "
read choice
if [ $choice == y ]; then
mkdir -p "${backup_dir}"
log "Starting backup."
log "Backing up /opt..."
backup_dir opt /opt
log "/opt is backed up, starting to backup nextcloud..."
backup_dir nextcloud /var/www/nextcloud
log "Nextcloud is backed up, starting to backup game servers..."
backup_dir servers /home/servers
log "Servers are backed up, starting to backup mailbox."
backup_dir mail /var/mail
log "Mail are backed up, starting to backup peertube"
backup_dir peertube /var/www/peertube
log "Peertube are backed up, starting to backup DBs..."
log "Backing up synapse DB"
backup_pg_db synapse
log "Synapse DB is backed up, backing up nextcloud DB..."
backup_pg_db nextclouddb
log "Nextcloud DB is backed up, backing up pleroma DB..."
backup_pg_db pleroma
log "Pleroma DB is backed up, backing up peerube DB..."
backup_pg_db peertube_prod
log "Peertube DB is backed up, compiling all to one and sending to pomoika..."
tar cvf "${backup_dir}.tar" "${backup_dir}"
log "Done! Now you can restore it!"
else
log "Cancelled."
fi
### to restore
#
#pg_restore -d postgres --clean --create "<backup file>"
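#A rough restore sketch (paths are placeholders; verify before running):
#  tar xf "/backup/<timestamp>.tar" -C /          # recreates /backup/<timestamp>/ with the per-item archives
#  tar xpf "/backup/<timestamp>/opt.tar" -C /     # then unpack an individual archive back into place
#The .dump files in the same directory can be fed to pg_restore as shown above.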

backup/backup_manager_daxik.sh Executable file

@@ -0,0 +1,10 @@
#!/bin/bash
#Script that manages backups by leca@foxarmy.org
#count every dir
count=$(find . -maxdepth 1 -type d -not -name "." | wc -l)
#if there are more than 5 directories, delete the oldest one.
if [ "$count" -gt 5 ]; then
    rm -rf "$(ls -d -t1 */ | tail -n 1)"
fi
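#Note: only one directory is removed per invocation, so rotation relies on this being run after
#every backup; autobackup_daxik.sh invokes ./backup_manager.sh from its backup_dir, presumably
#this script deployed under that name.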


@@ -0,0 +1,9 @@
#!/bin/bash
#POSTs a registration request for every numeric username from a to b-1 to the staging API
a=10000000
b=1000000000
for ((user = a; user < b; user++)); do
    curl -X POST --header "Content-Type: application/json" --data "{\"username\":\"${user}\",\"password\":\"1234\"}" https://staging.dimensionproject.net/api/register
done