# Mirror of https://gitlab.sectorq.eu/jaydee/omv_backup.git
# Synced 2025-07-01 15:48:33 +02:00
#!/usr/bin/env python3
|
|
import datetime
|
|
import logging
|
|
from paho.mqtt import client as mqtt_client
|
|
import getopt
|
|
import json
|
|
import time
|
|
import socket
|
|
import subprocess
|
|
import sys
|
|
import os
|
|
import re
|
|
import platform
|
|
import requests
|
|
import fnmatch
|
|
import yaml
|
|
import paramiko
|
|
from wakeonlan import send_magic_packet
|
|
# PID of this process, used below to exclude our own ps line from the
# duplicate-instance count.
pid = os.getpid()

# Peer hosts that get an SSH "touch" notification after a backup run.
# NOTE(review): the original list contained "nas.home.lan" twice, which only
# made the script contact that host twice; the duplicate is dropped.
servers = ["rpi5.home.lan", "nas.home.lan", "rack.home.lan"]

host = platform.node().lower()
#input(host)

# Single-instance guard: count other running copies of this script and exit
# if one exists.  Fix: the original grepped for the hard-coded name
# "omv_backups.py", which does not match this script's actual file name
# (omv_backup.py), so the guard could never fire; grep for the real
# executable name instead.
_SCRIPT_NAME = os.path.basename(sys.argv[0])
cmnd = "ps -ef|grep {}|grep -v grep |grep -v {}|wc -l".format(_SCRIPT_NAME, pid)
status, output = subprocess.getstatusoutput(cmnd)
if int(output) > 0:
    print("Running already!")
    sys.exit()
|
|
def is_port_open(host, port, timeout=None):
    """Return True if a TCP connection to ``host:port`` can be established.

    Parameters
    ----------
    host : str
        Hostname or IP address to probe.
    port : int
        TCP port number.
    timeout : float or None, optional
        Maximum seconds to wait for the connection.  ``None`` (the default,
        matching the original behaviour) blocks until the OS gives up.

    Returns
    -------
    bool
        True when the port accepted the connection; False on any socket
        error (connection refused, unreachable, timeout, DNS failure).
    """
    try:
        # create_connection resolves the name and tries each address in
        # turn; the context manager guarantees the probe socket is closed
        # on every path (the original leaked it on success-path exceptions).
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except socket.error:
        # socket.error is an alias of OSError and covers gaierror/timeout.
        return False
|
|
# Discover this machine's primary outbound IP by "connecting" a UDP socket
# toward the router.  UDP connect() sends no packets, so the target
# doesn't even have to be reachable -- the kernel just selects the source
# address that would be used, which getsockname() then reports.
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
conn = False
while not conn:
    try:
        s.connect(('192.168.77.1', 1))
        IP = s.getsockname()[0]
        print(IP)
        print(output)
        conn = True
    except OSError:
        # No usable network yet (e.g. right after boot) -- retry forever.
        # Fix: was a bare "except:", which also swallowed
        # KeyboardInterrupt/SystemExit; OSError covers all socket failures.
        time.sleep(5)
# The probe socket is no longer needed once IP is known.
s.close()
|
|
|
# ---------------------------------------------------------------------------
# MQTT broker settings and command-line parsing.
# ---------------------------------------------------------------------------
broker = 'mqtt.home.lan'
port = 1883
# Summary topic for backup status (per-app topics are built later).
topic_sum = "sectorq/amd/backups"
# NOTE(review): hard-coded credentials in source; consider an external
# config file or environment variables.
mqtt_username = 'jaydee'
mqtt_password = 'jaydee1'
print("1")
try:
    # Short options handled by the loop below: -a auto, -m manual,
    # -f first/full, -d <date>, -t test (interactive confirm), -s sync,
    # -S start containers, -O stop containers, -r <apps> restore,
    # -b backup daemon, -D dry run, -T ssh test.
    opts, args = getopt.getopt(sys.argv[1:], "TamftDr:bd:sSO", ["command=", "help", "output="])
except getopt.GetoptError as err:
    #usage()
    sys.exit(2)
output = None
# QJ : getopts
# Default run mode and feature flags; flipped by the option loop below.
_MODE = "manual"
_FIRST = _TEST = _RESTORE = _BACKUP = _SYNC = _START = _STOP = _SSH_TEST = False
# _EXECUTE False means "dry run": build and log commands but don't run them.
_EXECUTE = True
# "pick" triggers the interactive snapshot chooser in restore mode.
_DATE = "pick"
|
|
|
|
# Map parsed command-line options onto the module-level mode flags above.
for o, a in opts:
    if o == "-a":
        _MODE = "auto"
    elif o in ("-m", "--manual"):
        _MODE = "manual"
    elif o in ("-f", "--first"):
        _FIRST = True
    elif o in ("-d", "--date"):
        _DATE = a
    elif o in ("-t", "--test"):
        _TEST = True
    elif o in ("-s", "--sync"):
        _SYNC = True
    elif o in ("-S", "--start"):
        _START = True
    elif o in ("-O", "--stop"):
        _STOP = True
    elif o in ("-r", "--restore"):
        _RESTORE = True
        # -r takes a comma-separated list of app names, or "all".
        _APP = a
        print("RESTORE")
    elif o in ("-b", "--backup"):
        _BACKUP = True
    elif o in ("-D", "--dry"):
        _EXECUTE = False
    elif o in ("-T", "--ssh-test"):
        # Fix: the original tuple read ("-T", "--dry"), a copy-paste of the
        # -D branch.  Long options only ever appear if registered in the
        # getopt longopts list (neither spelling is), so this rename does
        # not change runtime behaviour -- it just removes the ambiguity.
        _SSH_TEST = True
|
|
# Log-level selection.  _LOG_LEVEL is an in-source switch; any value other
# than the three known ones falls through to plain INFO logging.
_LOG_LEVEL = ""
LOG_FILE = "/var/log/omv_backup.log"

# switch value -> (logging threshold, announcer function, announcement text)
_LEVEL_TABLE = {
    "DEBUG": (logging.DEBUG, logging.debug, 'using debug loging'),
    "ERROR": (logging.ERROR, logging.info, 'using error loging'),
    "SCAN": (logging.DEBUG, logging.info, 'using error loging'),
}
_level, _announce, _note = _LEVEL_TABLE.get(
    _LOG_LEVEL, (logging.INFO, logging.info, "script started"))
logging.basicConfig(filename=LOG_FILE,
                    level=_level,
                    format='%(asctime)s : %(levelname)s : %(message)s',
                    datefmt='%m/%d/%Y %I:%M:%S %p')
_announce(_note)

logger = logging.getLogger(__name__)
|
|
print("2")
# Fixed client id -- the broker treats every run of this script as the
# same MQTT session.
client_id = "dasdasdasd333"
try:
    # paho-mqtt >= 2.0 requires an explicit callback API version argument.
    client = mqtt_client.Client(mqtt_client.CallbackAPIVersion.VERSION1, client_id)
except:
    # Older paho-mqtt (< 2.0) has no CallbackAPIVersion; fall back to the
    # legacy constructor (note: without the explicit client_id).
    # NOTE(review): a bare except also hides unrelated constructor errors;
    # AttributeError would be the precise exception here.
    client = mqtt_client.Client()
client.username_pw_set(mqtt_username, mqtt_password)
|
|
|
|
|
|
# Backup job catalogue, keyed by source host.  Each host entry provides the
# SSH login rsync pulls from, plus a "jobs" mapping of
# job name -> {source directory, exclude pattern, active flag}.
# Fix: the "rpi5.home.lan" entry originally listed its jobs at the top
# level, without the "login"/"jobs" wrapper that the backup loop requires
# (it reads backups[host]["jobs"] and backups[host]["login"]), so any
# backup request for that host crashed with KeyError.  It now follows the
# same shape as the other hosts; the login follows the root@<host>
# convention used for m-server -- TODO confirm the rpi5 account.
backups = {
    "nas": {
        "login": "admin@nas.home.lan",
        "jobs": {
            "github": {
                "source": "/share/Data/__GITHUB",
                "exclude": "",
                "active": True
            },
            "photo": {
                "source": "/share/Photo/Years",
                "exclude": "",
                "active": True
            }
        }
    },
    "m-server": {
        "login": "root@m-server.home.lan",
        "jobs": {
            "docker_data": {
                "source": "/share/docker_data/",
                "exclude": "",
                "active": True
            },
            "fail2ban": {
                "source": "/etc/fail2ban/",
                "exclude": "",
                "active": True
            }
        }
    },
    "rpi5.home.lan": {
        "login": "root@rpi5.home.lan",
        "jobs": {
            "docker_data": {
                "source": "/share/docker_data/",
                "exclude": "",
                "active": True
            },
            "fail2ban": {
                "source": "/etc/fail2ban/",
                "exclude": "",
                "active": True
            }
        }
    }
}
|
|
# Mount point for backups and the machine that stores them.
BACKUP_FS = "/media/backup/"
BACKUP_HOST = "amd.home.lan"
#BACKUP_HOST = "morefine.home.lan"

logging.info("Test connection")
# Early sanity check: raises socket.gaierror/herror if the backup host
# cannot be resolved, aborting the script before any work starts.
hm = socket.gethostbyaddr(BACKUP_HOST)

logging.info(_RESTORE)
|
|
def send_mqtt_message(msg):
    """Publish *msg* as JSON to the module-global MQTT ``topic``.

    Relies on the module-level ``client``, ``broker`` and ``topic``
    variables; callers set ``topic`` before invoking.  Any failure is
    logged and swallowed so an MQTT outage never aborts a backup or
    restore run.
    """
    try:
        client.connect(broker, 1883, 60)
        client.publish(topic, json.dumps(msg))
        client.disconnect()
    except Exception:
        # Fix: was a bare "except:".  Exception still swallows every
        # publish/connect error, but lets KeyboardInterrupt and SystemExit
        # propagate instead of being silently eaten.
        logging.error("Failed to send")
|
|
|
|
# Containers managed by the -S (start) switch.
# Fix: this list was originally defined inside the "_SYNC" branch, so
# running with -S but without -s crashed with NameError; it is now defined
# unconditionally before both branches.
containers = ["HomeAssistant","webhub-web-1","heimdall","pihole","mosquitto-mosquitto-1","mailu3-redis-1","mailu3-webmail-1","mailu3-resolver-1","mailu3-antispam-1","mailu3-webdav-1","mailu3-smtp-1","mailu3-oletools-1","mailu3-front-1","mailu3-fetchmail-1","mailu3-imap-1","matter-server","piper-en","openwakeword","whisper-en","auth-worker-1","auth-server-1","auth-authentik_ldap-1","auth-redis-1","auth-postgresql-1","nginx-app-1"]

if _SYNC:
    # Ask the updater service on .238 to refresh its containers.
    cmnd = f"curl -H 'Authorization: Bearer l4c1j4yd33Du5lo' 192.168.77.238:8094/v1/update"
    logging.info(cmnd)
    status, output = subprocess.getstatusoutput(cmnd)

if _START:
    # Start every managed container in list order.
    for c in containers:
        cmnd = f"docker start {c}"
        print(cmnd)
        status, output = subprocess.getstatusoutput(cmnd)
|
|
if _STOP:
    # Stop every running container except watchtower itself.
    cmnd = "docker ps"
    status, running_containers = subprocess.getstatusoutput(cmnd)
    logging.info(running_containers)
    # Fix: the original iterated every line of "docker ps" output including
    # the column-header line, and therefore also issued a bogus
    # "docker stop NAMES"; the header is skipped here.  It also reused
    # running_containers for each stop result, which was confusing (though
    # harmless, since the iterated list was already built).
    for line in running_containers.splitlines()[1:]:
        # The container name is the last column of each ps line.
        name = line.split()[-1]
        print(name)
        if name == "watchtower-watchtower-1":
            continue
        cmnd = f"docker stop {name}"
        status, output = subprocess.getstatusoutput(cmnd)
|
|
|
|
if _RESTORE:
    # ------------------------------------------------------------------
    # RESTORE MODE (-r <apps>|all): pull application data back from the
    # backup server (amd.home.lan) onto this machine with rsync, then run
    # per-app fix-ups (sqlite IP rewrites, yaml tweaks) where needed.
    # NOTE(review): indentation in this section is reconstructed -- the
    # file reached review with all leading whitespace stripped.
    # ------------------------------------------------------------------
    logging.info("Starting Restore")
    now = datetime.datetime.now()
    STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
    if _APP == "all":
        # Restoring everything implies the newest backup set.
        _DATE = "latest"
        if host == "rpi5.home.lan":
            # Fixed app list for the Pi.
            _APP = ["nginx","ha","gitea","gitlab","mailu","bitwarden","esphome","grafana","ingluxdb","kestra","matter-server","mosquitto","octoprint","octoprint2","pihole","unify_block","webhub","homepage","watchtower"]
        else:
            # Every directory in the latest m-server backup set is one app.
            cmnd = "ssh root@amd.home.lan 'ls /mnt/raid/backup/m-server/docker_data/latest'"
            status, output = subprocess.getstatusoutput(cmnd)
            _APP = output.splitlines()
            logging.info(_APP)
            #input("????")
    else:
        # -r took a comma-separated list of app names.
        _APP = _APP.split(",")

    for app in _APP:
        # Per-app status topic; send_mqtt_message() reads this global.
        topic = "sectorq/amd/restore/{}".format(app)
        msg = {"status":"inactive","bak_name":app,"start_time":"inactive","end_time":"inactive","progress":0}
        send_mqtt_message(msg)
        now = datetime.datetime.now()
        DATETIME = now.strftime("%Y-%m-%d_%H-%M-%S")
        BACKUP_HOST = f"root@amd.home.lan"
        BACKUP_DEVICE = "/mnt/raid"
        BACKUP_DIR = f"/backup/{host}"

        if _DATE == "pick":
            # Interactive mode: list the available snapshots and let the
            # user choose one; the choice then sticks for remaining apps.
            cmnd = f"ssh root@amd.home.lan 'ls {BACKUP_DEVICE}/backup/m-server/docker_data'"
            status, output = subprocess.getstatusoutput(cmnd)
            #print(output)
            dates = output.splitlines()
            n = 1
            for i in dates:
                logging.info(f"{n} - {i}" )
                n += 1
            ans = input("Pick a backup to restore : ")
            _DATE = dates[int(ans) - 1]

        # fail2ban lives outside docker_data and restores into /etc.
        if app == "fail2ban":
            logging.info("?>?????")
            NEW_BACKUP_DIR = f"/backup/m-server/fail2ban/{_DATE}/"
            SOURCE_DIR = f"/etc/fail2ban"
        else:
            NEW_BACKUP_DIR = f"/backup/m-server/docker_data/{_DATE}/{app}"
            SOURCE_DIR = f"/share/docker_data/"
        if _FIRST:
            BACKUP_PATH="{}/initial".format(BACKUP_DIR)
        else:
            BACKUP_PATH="{}/{}".format(BACKUP_DIR, DATETIME)
            LATEST_LINK="{}/{}".format(BACKUP_DIR,_DATE)
        FULL_BACKUP_LATEST = f"{NEW_BACKUP_DIR}/{_DATE}"
        # NOTE(review): LATEST_LINK is unconditionally overwritten here,
        # making the assignment in the else-branch above dead code.
        LATEST_LINK = f"/{host}/{app}/{_DATE}"

        msg = {"status":"started","bak_name":app,"start_time":DATETIME,"end_time":"in progress", "progress":0}
        send_mqtt_message(msg)
        logging.info("Create backup dir")
        # NOTE(review): this logs whatever command was built last (the
        # ssh/ls from the "pick" block); no mkdir is built in restore mode.
        logging.info(cmnd)

        #cmnd = "rsync -av --delete {}/ --link-dest {} --exclude=\".cache\" {}".format(SOURCE_DIR, LATEST_LINK, BACKUP_PATH)

        if app == "heimdall":
            # NOTE(review): this is a stand-alone "if", not part of the
            # elif-chain below, so "heimdall" later also matches the final
            # else-branch and gets rsynced a second time -- presumably this
            # was meant to be the head of the chain; confirm before fixing.
            logging.info("Stopping docker")
            cmnd = "docker stop heimdall"
            status, output = subprocess.getstatusoutput(cmnd)
            cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
            ans = "y"
            logging.info(cmnd)
            logging.info("Sync files")
            if _TEST:
                ans = input("continue?") or "n"
            if ans == "y" and _EXECUTE:
                status, output = subprocess.getstatusoutput(cmnd)
                # Rewrite dashboard links so they point at this host's IP.
                entries = ["Home Assistant","Nginx Proxy Manager","Portainer","Roundcube","Authentik","Kestra"]
                for e in entries:
                    cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"SELECT url FROM items WHERE title = '{e}'\""
                    logging.info(cmnd)
                    status, output = subprocess.getstatusoutput(cmnd)
                    # Replace any dotted-quad in the stored URL with our IP.
                    regex = re.compile(r'[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}')
                    contents = re.sub(regex, IP , output)
                    cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"UPDATE items SET url = '{contents}' WHERE title = '{e}'\""
                    logging.info(cmnd)
                    status, output = subprocess.getstatusoutput(cmnd)
            # cmnd = "docker start heimdall"
            # status, output = subprocess.getstatusoutput(cmnd)

        if app == "ha":
            logging.info("Stopping docker")
            # NOTE(review): stops "heimdall", not a Home-Assistant
            # container -- looks like a copy-paste leftover; confirm.
            cmnd = "docker stop heimdall"
            status, output = subprocess.getstatusoutput(cmnd)
            cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
            ans = "y"
            logging.info(cmnd)
            logging.info("Sync files")
            if _TEST:
                ans = input("continue?") or "n"
            if ans == "y" and _EXECUTE:
                status, output = subprocess.getstatusoutput(cmnd)
            logging.info("Start docker")
            # cmnd = "docker start heimdall"
            # status, output = subprocess.getstatusoutput(cmnd)
        elif app == "fail2ban":
            logging.info("Stopping docker")
            cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
            ans = "y"
            logging.info(cmnd)
            logging.info("Sync files")
            if _TEST:
                ans = input("continue?") or "n"
            if ans == "y" and _EXECUTE:
                status, output = subprocess.getstatusoutput(cmnd)
            logging.info("Start docker")
            # cmnd = "docker start heimdall"
            # status, output = subprocess.getstatusoutput(cmnd)
        elif app == "homepage":
            logging.info("Stopping docker")
            cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
            ans = "y"
            logging.info(cmnd)

            if _TEST:
                ans = input("continue?") or "n"
            if ans == "y" and _EXECUTE:
                status, output = subprocess.getstatusoutput(cmnd)

                # Point the dashboard logo at an image matching this host.
                file = "/share/docker_data/homepage/config/widgets.yaml"
                with open(file, 'r') as stream:
                    try:
                        loaded = yaml.load(stream, Loader=yaml.FullLoader)
                    except yaml.YAMLError as exc:
                        logging.info(exc)

                # Modify the fields from the dict
                #loaded['logo']['icon'] = "/images/morefine2.png"
                logging.info(json.dumps(loaded, indent=2))
                i = 0
                for y in loaded:
                    logging.info(i)
                    logging.info(y)

                    if "logo" in y:
                        if host == "rpi5.home.lan":
                            loaded[i]['logo']['icon'] = "/images/rpi5.png"
                        elif host == "nas.home.lan":
                            loaded[i]['logo']['icon'] = "/images/qnap_nas.png"
                        else:
                            loaded[i]['logo']['icon'] = "/images/morefine2.png"
                    i+=1

                # Save it again
                logging.info(f"writing to file {file}")
                with open(file, 'w') as stream:
                    try:
                        yaml.dump(loaded, stream, default_flow_style=False)
                    except yaml.YAMLError as exc:
                        print("failed")
                        print(exc)

            logging.info("Start docker")
            # cmnd = "docker start heimdall"
            # status, output = subprocess.getstatusoutput(cmnd)
        elif app == "nginx1":
            logging.info("Stopping docker")
            cmnd = "docker stop nginx-app-1"
            status, output = subprocess.getstatusoutput(cmnd)
            cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
            ans = "y"
            logging.info(cmnd)
            logging.info("Sync files")
            if _TEST:
                ans = input("continue?") or "n"
            if ans == "y" and _EXECUTE:
                status, output = subprocess.getstatusoutput(cmnd)
                # Repoint Nginx-Proxy-Manager targets at this host's IP.
                domains = ["sectorq.eu","gitlab.sectorq.eu","ha.sectorq.eu","mail.sectorq.eu","pw.sectorq.eu","semaphore.sectorq.eu","kestra.sectorq.eu","auth.sectorq.eu"]
                for d in domains:
                    cmnd = f'sqlite3 /share/docker_data/nginx/data/database.sqlite "UPDATE proxy_host SET forward_host = \'{IP}\' WHERE domain_names = \'[\\"{d}\\"]\'"'
                    logging.info(cmnd)
                    status, output = subprocess.getstatusoutput(cmnd)

                # Patch the generated vhost files as well.
                cmnd = 'egrep -l "# bazarr.sectorq.eu|# gitea.sectorq.eu|# jf.sectorq.eu|# kestra.sectorq.eu|# auth.sectorq.eu|# ha.sectorq.eu|# pw.sectorq.eu|# semaphore.sectorq.eu|# sectorq.eu|# gitlab.sectorq.eu|# ha.sectorq.eu" /share/docker_data/nginx/data/nginx/proxy_host/*'
                status, output = subprocess.getstatusoutput(cmnd)
                logging.info(output.splitlines())
                for file in output.splitlines():
                    logging.info(file)
                    f = open(file)
                    contents = f.read()
                    f.close()
                    # Swap the "set $server ..." address for our IP.
                    regex = re.compile(r'\n\s+set \$server\s+\"\w+.\w+.\w+.\w+\";')
                    contents = re.sub(regex, f'\n set $server \"{IP}\";', contents)
                    #print(contents)
                    logging.info(regex)
                    f = open(file, "w")
                    contents = f.write(contents)
                    f.close()
                # NOTE(review): re-runs the last egrep command; harmless.
                status, output = subprocess.getstatusoutput(cmnd)
            logging.info("Starting docker")
            # cmnd = "docker start nginx-app-1"
            # status, output = subprocess.getstatusoutput(cmnd)
        else:
            # Generic app: plain rsync from the backup set, no fix-ups.
            cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
            ans = "y"
            logging.info(cmnd)
            logging.info("Sync files")
            if _TEST:
                ans = input("continue?") or "n"
            if ans == "y" and _EXECUTE:
                status, output = subprocess.getstatusoutput(cmnd)
        # Per-app "finished" message.
        now = datetime.datetime.now()
        ENDTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
        msg = {"status":"finished","bak_name":app,"start_time":DATETIME,"end_time":ENDTIME,"progress":0}
        send_mqtt_message(msg)

    # All apps done -- publish the overall summary.
    # NOTE(review): "topic" still holds the last app's restore topic here,
    # so the summary goes to that per-app topic, not topic_sum -- confirm.
    now = datetime.datetime.now()
    ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S")
    logging.info("Sending finished status")
    msg = {"mode":_MODE,"status":"finished","bak_name":"complete","start_time":STARTTIME,"end_time":ENDJOB,"progress":0,"used_space":"?"}
    logging.info(msg)
    send_mqtt_message(msg)

    if _MODE == "auto":
        # Unattended runs put the backup server back to sleep when done.
        cmnd = "ssh root@amd.home.lan 'systemctl suspend &'"
        status, output = subprocess.getstatusoutput(cmnd)
|
|
|
if _BACKUP:
    # ------------------------------------------------------------------
    # BACKUP MODE (-b): daemon loop meant to run on the backup server.
    # It polls /backups/ for request files (one file named per source
    # host), rsyncs every active job for that host into a timestamped
    # directory hard-linked against "latest", rotates the "latest"
    # symlink, prunes old sets, reports progress over MQTT, and finally
    # pings the peer hosts via SSH.  The loop is deliberately endless --
    # after a run it goes back to polling.
    # NOTE(review): indentation in this section is reconstructed; the
    # file reached review with all leading whitespace stripped.
    # ------------------------------------------------------------------
    while True:
        directory = '/backups/'
        # Number of pending request files.
        count = len(fnmatch.filter(os.listdir(directory), '*'))
        # Fix: the original called logging.info('File Count:', count) --
        # a %-style argument with no placeholder, which made the logging
        # module raise an internal formatting error on every poll.
        logging.info('File Count: %s', count)
        if count == 0:
            time.sleep(10)
            continue
        else:
            finished = []
            now = datetime.datetime.now()
            STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
            msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":"","cur_job":"","start_time":STARTTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
            send_mqtt_message(msg)
            # Each request file is named after the host to back up.
            for filename in os.scandir(directory):
                if filename.is_file():
                    logging.info(filename.path)
                    logging.info(filename.name)
                    host = filename.name
                    logging.info("Backup")
                    for b in backups[host]["jobs"]:
                        topic = "sectorq/amd/backups"
                        if not backups[host]["jobs"][b]["active"]:
                            logging.info("Backup {} is not active!".format(b))
                            msg = {"status":"inactive","bak_name":b,"start_time":"inactive","end_time":"inactive","progress":0}
                            send_mqtt_message(msg)
                            continue

                        SOURCE_DIR = backups[host]["jobs"][b]["source"]
                        now = datetime.datetime.now()
                        BACKUP_HOST = backups[host]["login"]
                        BACKUP_DEVICE = "/mnt/raid"
                        # rsync pulls from login:source into the local tree.
                        BACKUP_DIR = f"{BACKUP_HOST}:{SOURCE_DIR}"
                        BACKUP_ROOT = f"{BACKUP_DEVICE}/backup/{host}/{b}"
                        DATETIME = now.strftime("%Y-%m-%d_%H-%M-%S")

                        if _FIRST:
                            NEW_BACKUP_DIR = f"{BACKUP_ROOT}/initial"
                        else:
                            NEW_BACKUP_DIR = f"{BACKUP_ROOT}/{DATETIME}"

                        FULL_BACKUP_LATEST = f"{BACKUP_ROOT}/latest"

                        # msg = {"status":"started","bak_name":b,"start_time":DATETIME,"end_time":"in progress", "progress":0}
                        msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"start_time":STARTTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
                        # Published inline (not via send_mqtt_message) so the
                        # progress message can be retained by the broker.
                        client.connect(broker,1883,60)
                        client.publish(topic, json.dumps(msg),qos=0, retain=True)
                        client.disconnect()

                        cmnd = "mkdir -p " + NEW_BACKUP_DIR
                        logging.info(cmnd)
                        if _EXECUTE:
                            status, output = subprocess.getstatusoutput(cmnd)
                            logging.info(output)
                            logging.info(status)
                        logging.info("Create backup dir")

                        #cmnd = "rsync -av --delete {}/ --link-dest {} --exclude=\".cache\" {}".format(SOURCE_DIR, LATEST_LINK, BACKUP_PATH)
                        if _FIRST:
                            # Fix: the original referenced BACKUP_PATH, which
                            # is never defined in backup mode, so any
                            # "--first" run died with NameError.  The initial
                            # copy is the same pull as below, just without
                            # --link-dest (there is no previous set yet).
                            cmnd = f"rsync -avz --delete {BACKUP_DIR} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" {NEW_BACKUP_DIR}"
                        else:
                            cmnd = f"rsync -avz --delete {BACKUP_DIR} --link-dest {FULL_BACKUP_LATEST} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" {NEW_BACKUP_DIR}"

                        ans = "y"
                        logging.info(cmnd)
                        logging.info("Sync files")
                        #input("??????")
                        if _TEST:
                            ans = input("continue?") or "n"
                        if ans == "y" and _EXECUTE:
                            status, output = subprocess.getstatusoutput(cmnd)
                            #proc = subprocess.Popen(cmnd,stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.PIPE,cwd = "/myapps/",shell=True)

                        # Rotate the "latest" pointer to the new set.
                        cmnd = f"rm -rf {FULL_BACKUP_LATEST}"
                        logging.info(cmnd)
                        logging.info("Removing latest link")
                        if _EXECUTE:
                            status, output = subprocess.getstatusoutput(cmnd)
                        if _FIRST:
                            cmnd = f"cd {BACKUP_ROOT}; ln -s initial latest"
                        else:
                            cmnd = f"cd {BACKUP_ROOT}; ln -s {DATETIME} latest"
                        logging.info("Creating new latest link")
                        if _EXECUTE:
                            status, output = subprocess.getstatusoutput(cmnd)

                        #Remove old
                        logging.info("Removing old dirs")
                        #cmnd = "find {} -maxdepth 1 -type d -mtime +30 -exec rm -rf {{}} \;".format(BACKUP_DIR)
                        # Fix: the original command was missing the ";" after
                        # "cd" (so it never ran at all) and used -mmin +30,
                        # which would have pruned sets older than 30
                        # *minutes*; the commented-out predecessor above
                        # shows 30 days (-mtime +30) was intended.
                        # -mindepth 1 keeps find from handing "./" to rm.
                        cmnd = f"cd {BACKUP_ROOT}; find ./ -mindepth 1 -maxdepth 1 -type d -mtime +30 -exec rm -rf {{}} \\;"
                        if _EXECUTE:
                            status, output = subprocess.getstatusoutput(cmnd)
                        now = datetime.datetime.now()
                        ENDTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
                        #msg = {"status":"finished","bak_name":b,"start_time":DATETIME,"end_time":ENDTIME,"progress":0}
                        finished.append(b)
                        msg = {"mode":_MODE, "status":"finished","bak_name":"complete","host":host,"cur_job":b,"start_time":ENDTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
                        send_mqtt_message(msg)

                    # Per-host summary: raid usage, then clear the request.
                    logging.info("Getting size of FS")
                    cmnd = "df -h /mnt/raid|awk '{ print $3 }'|tail -1"
                    logging.info(cmnd)
                    status, output = subprocess.getstatusoutput(cmnd)
                    used_space = (output.split())[0]
                    now = datetime.datetime.now()
                    ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S")
                    logging.info("Size : {}".format(used_space))
                    logging.info("Sending finished status")
                    #msg = {"mode":_MODE,"status":"finished","bak_name":"complete","start_time":STARTTIME,"end_time":ENDJOB,"progress":0,"used_space":used_space}
                    msg = {"mode":_MODE, "status":"finished","bak_name":"complete","host":host,"cur_job":b,"start_time":STARTTIME,"end_time":ENDTIME,"progress":0,"finished":",".join(finished),"used_space":used_space}
                    logging.info(msg)
                    send_mqtt_message(msg)
                    # Consume the request file so the next poll sees zero.
                    os.remove(filename.path)

            # Notify peer hosts that the backup cycle completed.
            for s in servers:
                if s == "m-server.home.lan":
                    continue
                ssh = paramiko.SSHClient()
                ssh.load_system_host_keys()
                # Add SSH host key automatically if needed.
                ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
                # Key-based login as "jd" on the peer.
                ssh.connect(s,
                            username="jd",
                            look_for_keys=True,
                            allow_agent=False,
                            key_filename="/root/.ssh/id_rsa")
                ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command('touch /tmp/lala')
                ssh.close()
|
|
# if _MODE == "auto":
|
|
# hostup = True
|
|
# cmnd = "ssh root@omv.home.lan 'systemctl suspend &'"
|
|
# status, output = subprocess.getstatusoutput(cmnd)
|
|
# while hostup:
|
|
# #HOST_UP = os.system(f"ping -c 1 -w 2 omv.home.lan") == 0
|
|
# cmnd = f"ping -c 1 -w 2 {BACKUP_HOST}"
|
|
# status, output = subprocess.getstatusoutput(cmnd)
|
|
# # print(status)
|
|
# # print(output)
|
|
|
|
|
|
# if status == 0:
|
|
# print(f"Backup host up, waiting - {n}\r", end="")
|
|
# time.sleep(5)
|
|
# n += 1
|
|
# else:
|
|
# print("Backup host down " )
|
|
# hostup = False
|
|
|
|
|
|
|
|
# try:
|
|
# url = "http://m-server.home.lan:8123/api/webhook/-0eWYFhSTzdusAO8jwQS9t1AT?mode=off"
|
|
|
|
# x = requests.post(url)
|
|
|
|
# print(x.text)
|
|
# except:
|
|
# pass
|
|
if _SSH_TEST:
    # Connectivity probe (-T): touch a marker file on every peer host via
    # SSH to verify that key-based login works end to end.
    for s in servers:
        if s == "m-server.home.lan":
            # No SSH round-trip needed for the local machine.
            continue
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        # Add SSH host key automatically if needed.
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        # Key-based login as "jd" (agent disabled, explicit key file).
        ssh.connect(s,
                    username="jd",
                    look_for_keys=True,
                    allow_agent=False,
                    key_filename="/root/.ssh/id_rsa")
        ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command('touch /tmp/lala')
        ssh.close()