Compare commits

...

6 Commits

Author SHA1 Message Date
8c53b07cf5 added v3 2025-05-05 22:20:27 +02:00
420423066a build 2025-05-05 18:34:57 +02:00
02999e5619 build 2025-05-05 18:34:30 +02:00
a414a04bd0 build 2025-05-05 18:32:42 +02:00
0365c5e4e2 build 2025-05-05 18:29:43 +02:00
1ee67ce3e9 build 2025-05-05 18:27:55 +02:00
2 changed files with 573 additions and 326 deletions

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python3 #!/myapps/venv/bin/python3
import datetime import datetime
import logging import logging
from paho.mqtt import client as mqtt_client from paho.mqtt import client as mqtt_client
@ -7,19 +7,39 @@ import json
import time import time
import socket import socket
import subprocess import subprocess
from subprocess import Popen, PIPE, CalledProcessError
import sys import sys
import os import os
import re import re
import platform import platform
import requests import requests
import fnmatch import fnmatch
import yaml
import paramiko
import numpy as np
file_path = os.path.realpath(__file__)
dir_path = os.path.dirname(file_path)
VERSION="1.0.3"
# print(file_path)
# print(dir_path)
os.chdir(dir_path)
from wakeonlan import send_magic_packet from wakeonlan import send_magic_packet
pid = os.getpid() pid = os.getpid()
def is_port_open(host, port):
try:
sock = socket.create_connection((host, port))
sock.close()
return True
except socket.error:
return False
servers = ["rpi5.home.lan","nas.home.lan","rack.home.lan","m-server.home.lan"]
host = platform.node().lower() host = platform.node().lower()
#input(host)
cmnd = "ps -ef|grep omv_backups.py|grep -v grep |grep -v {}|wc -l".format(pid) cmnd = "ps -ef|grep omv_backups.py|grep -v grep |grep -v {}|wc -l".format(pid)
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
if int(output) > 0:
print("Running already!")
sys.exit()
def is_port_open(host, port): def is_port_open(host, port):
try: try:
sock = socket.create_connection((host, port)) sock = socket.create_connection((host, port))
@ -29,59 +49,91 @@ def is_port_open(host, port):
return False return False
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# doesn't even have to be reachable # doesn't even have to be reachable
conn = False
while not conn:
try:
s.connect(('192.168.77.1', 1)) s.connect(('192.168.77.1', 1))
IP = s.getsockname()[0] IP = s.getsockname()[0]
print(IP) conn = True
print(output) except:
if int(output) > 0: time.sleep(5)
print("Running already!")
sys.exit()
broker = 'mqtt.home.lan' broker = 'mqtt.home.lan'
port = 1883 port = 1883
topic_sum = "sectorq/omv/backups" topic_sum = "sectorq/amd/backups"
mqtt_username = 'jaydee' mqtt_username = 'jaydee'
mqtt_password = 'jaydee1' mqtt_password = 'jaydee1'
print("1")
try: try:
opts, args = getopt.getopt(sys.argv[1:], "amftdr:b", ["command=", "help", "output="]) opts, args = getopt.getopt(sys.argv[1:], "hTamftDr:bd:sSOl:", ["command=", "help", "output="])
except getopt.GetoptError as err: except getopt.GetoptError as err:
#usage() #usage()
sys.exit(2) sys.exit(2)
output = None output = None
# QJ : getopts # QJ : getopts
_MODE = "manual" _MODE = "manual"
_FIRST = _TEST = _RESTORE = _BACKUP = False _FIRST = _TEST = _RESTORE = _BACKUP = _SYNC = _START = _STOP = _SSH_TEST = False
_EXECUTE = True _EXECUTE = True
_DATE = "pick"
_LOG_LEVEL = ""
for o, a in opts: for o, a in opts:
if o == "-a": if o == "-a":
_MODE = "auto" _MODE = "auto"
elif o in ("-m", "--manual"): elif o in ("-m", "--manual"):
_MODE = "manual" _MODE = "manual"
elif o in ("-l", "--level"):
_LOG_LEVEL = a.upper()
elif o in ("-f", "--first"): elif o in ("-f", "--first"):
_FIRST = True _FIRST = True
elif o in ("-d", "--date"):
_DATE = a
elif o in ("-t", "--test"): elif o in ("-t", "--test"):
_TEST = True _TEST = True
elif o in ("-s", "--sync"):
_SYNC = True
elif o in ("-S", "--start"):
_START = True
elif o in ("-O", "--stop"):
_STOP = True
elif o in ("-r", "--restore"): elif o in ("-r", "--restore"):
_RESTORE = True _RESTORE = True
_APP = a _APP = a
print("RESTORE") print("RESTORE")
elif o in ("-b", "--backup"): elif o in ("-b", "--backup"):
_BACKUP = True _BACKUP = True
elif o in ("-d", "--dry"): elif o in ("-D", "--dry"):
_EXECUTE = False _EXECUTE = False
elif o in ("-T", "--dry"):
_SSH_TEST = True
elif o in ("-h", "--help"):
print(VERSION)
sys.exit()
LOG_FILE = "omv_backup.log"
if _LOG_LEVEL == "DEBUG":
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.debug('using debug loging')
elif _LOG_LEVEL == "ERROR":
logging.basicConfig(filename=LOG_FILE, level=logging.ERROR, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.info('using error loging')
elif _LOG_LEVEL == "SCAN":
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.info('using error loging')
else:
logging.basicConfig(filename=LOG_FILE, level=logging.INFO, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.info("script started")
print("2") logger = logging.getLogger(__name__)
client_id = "dasdasdasd333"
try:
client = mqtt_client.Client(mqtt_client.CallbackAPIVersion.VERSION1, client_id)
except:
client = mqtt_client.Client() client = mqtt_client.Client()
client.username_pw_set(mqtt_username, mqtt_password) client.username_pw_set(mqtt_username, mqtt_password)
client.connect(broker,1883,60)
now = datetime.datetime.now()
STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
msg = {"mode":_MODE, "status":"started","bak_name":"complete","start_time":STARTTIME,"end_time":"in progress","progress":0}
client.publish(topic_sum, json.dumps(msg));
client.disconnect()
backups = { backups = {
"nas": { "nas": {
"login": "admin@nas.home.lan",
"jobs": {
"github": "github":
{"source":"/share/Data/__GITHUB", {"source":"/share/Data/__GITHUB",
"exclude":"", "exclude":"",
@ -91,9 +143,12 @@ backups = {
"source":"/share/Photo/Years", "source":"/share/Photo/Years",
"exclude":"", "exclude":"",
"active":True "active":True
}, }
}
}, },
"m-server":{ "m-server":{
"login": "root@m-server.home.lan",
"jobs": {
"docker_data":{ "docker_data":{
"source":"/share/docker_data/", "source":"/share/docker_data/",
"exclude":"", "exclude":"",
@ -104,6 +159,7 @@ backups = {
"exclude":"", "exclude":"",
"active":True "active":True
} }
}
}, },
"rpi5.home.lan":{ "rpi5.home.lan":{
"docker_data":{ "docker_data":{
@ -118,162 +174,236 @@ backups = {
} }
} }
} }
BACKUP_FS = "/srv/dev-disk-by-uuid-2f843500-95b6-43b0-bea1-9b67032989b8"
BACKUP_HOST = "omv.home.lan" BACKUP_FS = "/media/backup/"
BACKUP_HOST = "amd.home.lan"
#BACKUP_HOST = "morefine.home.lan" #BACKUP_HOST = "morefine.home.lan"
if not host in backups and _BACKUP:
print(f"No backup jobs for {host}") logging.info("Test connection")
sys.exit()
print("Test connection")
print("3")
hm = socket.gethostbyaddr(BACKUP_HOST) hm = socket.gethostbyaddr(BACKUP_HOST)
hostdown = True logging.info(_RESTORE)
n=0 def send_mqtt_message(msg):
try: try:
url = "http://m-server.home.lan:8123/api/webhook/-0eWYFhSTzdusAO8jwQS9t1AT?mode=on" client.connect(broker,1883,60)
client.publish(topic, json.dumps(msg))
client.disconnect()
except ValueError as e:
logging.error("Failed to send")
print("Failed to send")
print(e)
x = requests.post(url) if _SYNC:
containers = ["HomeAssistant","webhub-web-1","heimdall","pihole","mosquitto-mosquitto-1","mailu3-redis-1","mailu3-webmail-1","mailu3-resolver-1","mailu3-antispam-1","mailu3-webdav-1","mailu3-smtp-1","mailu3-oletools-1","mailu3-front-1","mailu3-fetchmail-1","mailu3-imap-1","matter-server","piper-en","openwakeword","whisper-en","auth-worker-1","auth-server-1","auth-authentik_ldap-1","auth-redis-1","auth-postgresql-1","nginx-app-1"]
print(x.text) cmnd = f"curl -H 'Authorization: Bearer l4c1j4yd33Du5lo' 192.168.77.238:8094/v1/update"
except: logging.info(cmnd)
pass
while hostdown:
#HOST_UP = os.system(f"ping -c 1 -w 2 omv.home.lan") == 0
cmnd = f"ping -c 1 -w 2 {BACKUP_HOST}"
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
# print(status)
# print(output)
if _START:
for c in containers:
cmnd = f"docker start {c}"
print(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
if _STOP:
cmnd = "docker ps"
status, running_containers = subprocess.getstatusoutput(cmnd)
if status != 0: logging.info(running_containers)
send_magic_packet('88:c9:b3:b5:23:d8') for c in running_containers.splitlines():
print(f"Backup host down, waiting - {n}\r", end="") print(c.split()[-1])
time.sleep(5) if c.split()[-1] == "watchtower-watchtower-1":
n += 1 continue
else: cmnd = f"docker stop {c.split()[-1]}"
print("Backup host up " ) status, running_containers = subprocess.getstatusoutput(cmnd)
hostdown = False
port = 22 # Replace with the port you want to test
n=0
while not is_port_open(BACKUP_HOST, port):
print(f"Port {port} on {BACKUP_HOST} is closed. {n}\r", end="")
time.sleep(5)
n += 1
print(f"Port {port} on {BACKUP_HOST} is open.")
print("Starting")
print(_RESTORE)
if _RESTORE: if _RESTORE:
logging.info("Starting Restore")
print("Starting Restore") print("Starting Restore")
now = datetime.datetime.now()
STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
if _APP == "all": if _APP == "all":
_APP = ["nginx","ha","gitlab","mailu","bitwarden","esphome","grafana","ingluxdb","kestra","matter-server","mosquitto","octoprint","octoprint2","pihole","unify_block","webhub"] _DATE = "latest"
if host == "rpi5.home.lan":
_APP = ["nginx","ha","gitea","gitlab","mailu","bitwarden","esphome","grafana","ingluxdb","kestra","matter-server","mosquitto","octoprint","octoprint2","pihole","unify_block","webhub","homepage","watchtower"]
else:
cmnd = "ssh root@amd.home.lan 'ls /mnt/raid/backup/m-server/docker_data/latest'"
status, output = subprocess.getstatusoutput(cmnd)
_APP = output.splitlines()
logging.info(_APP)
#input("????")
else: else:
_APP = _APP.split(",") _APP = _APP.split(",")
for app in _APP:
topic = "sectorq/omv/restore/{}".format(app)
client.connect(broker,1883,60)
msg = {"status":"inactive","bak_name":app,"start_time":"inactive","end_time":"inactive","progress":0}
client.publish(topic, json.dumps(msg)) PROGRESS = 0
client.disconnect() topic = "sectorq/amd/restore"
step = 100 / len(_APP)
for app in _APP:
msg = {"mode":_MODE, "status":"restore","bak_name":"Restore","host":host,"cur_job":app,"start_time":STARTTIME,"end_time":"","progress":str(round(np.ceil(PROGRESS))) + "%","finished":1,"used_space":1}
logging.info(msg)
send_mqtt_message(msg)
PROGRESS = PROGRESS + step
now = datetime.datetime.now() now = datetime.datetime.now()
DATETIME = now.strftime("%Y-%m-%d_%H-%M-%S") DATETIME = now.strftime("%Y-%m-%d_%H-%M-%S")
BACKUP_HOST = "root@omv.home.lan" BACKUP_HOST = f"root@amd.home.lan"
BACKUP_DEVICE = "/srv/dev-disk-by-uuid-2f843500-95b6-43b0-bea1-9b67032989b8" BACKUP_DEVICE = "/mnt/raid"
BACKUP_DIR = f"/backup/{host}" BACKUP_DIR = f"/backup/{host}"
if _DATE == "pick":
cmnd = f"ssh root@amd.home.lan 'ls {BACKUP_DEVICE}/backup/m-server/docker_data'"
status, output = subprocess.getstatusoutput(cmnd)
#print(output)
dates = output.splitlines()
n = 1
for i in dates:
logging.info(f"{n} - {i}" )
n += 1
ans = input("Pick a backup to restore : ")
_DATE = dates[int(ans) - 1]
if app == "fail2ban": if app == "fail2ban":
print("?>?????") logging.info("?>?????")
NEW_BACKUP_DIR = f"/backup/m-server/fail2ban/latest/" NEW_BACKUP_DIR = f"/backup/m-server/fail2ban/{_DATE}/"
SOURCE_DIR = f"/etc/fail2ban" SOURCE_DIR = f"/etc/fail2ban"
else: else:
NEW_BACKUP_DIR = f"/backup/m-server/docker_data/latest/{app}" NEW_BACKUP_DIR = f"/backup/m-server/docker_data/{_DATE}/{app}"
SOURCE_DIR = f"/share/docker_data/" SOURCE_DIR = f"/share/docker_data/"
if _FIRST: if _FIRST:
BACKUP_PATH="{}/initial".format(BACKUP_DIR) BACKUP_PATH="{}/initial".format(BACKUP_DIR)
else: else:
BACKUP_PATH="{}/{}".format(BACKUP_DIR, DATETIME) BACKUP_PATH="{}/{}".format(BACKUP_DIR, DATETIME)
LATEST_LINK="{}/latest".format(BACKUP_DIR) LATEST_LINK="{}/{}".format(BACKUP_DIR,_DATE)
FULL_BACKUP_LATEST = f"{NEW_BACKUP_DIR}/latest" FULL_BACKUP_LATEST = f"{NEW_BACKUP_DIR}/{_DATE}"
LATEST_LINK = f"/{host}/{app}/latest" LATEST_LINK = f"/{host}/{app}/{_DATE}"
logging.info("Create backup dir")
msg = {"status":"started","bak_name":app,"start_time":DATETIME,"end_time":"in progress", "progress":0} logging.info(cmnd)
client.connect(broker,1883,60)
client.publish(topic, json.dumps(msg))
client.disconnect()
print("Create backup dir")
print(cmnd)
#cmnd = "rsync -av --delete {}/ --link-dest {} --exclude=\".cache\" {}".format(SOURCE_DIR, LATEST_LINK, BACKUP_PATH) #cmnd = "rsync -av --delete {}/ --link-dest {} --exclude=\".cache\" {}".format(SOURCE_DIR, LATEST_LINK, BACKUP_PATH)
if app == "heimdall": if app == "heimdall":
print("Stopping docker") logging.info("Stopping docker")
cmnd = "docker stop heimdall" cmnd = "docker stop heimdall"
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"rsync -avz --delete rsync://{BACKUP_HOST}{NEW_BACKUP_DIR} {SOURCE_DIR}" cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y" ans = "y"
print(cmnd) logging.info(cmnd)
print("Sync files") logging.info("Sync files")
if _TEST: if _TEST:
ans = input("continue?") or "n" ans = input("continue?") or "n"
if ans == "y" and _EXECUTE: if ans == "y" and _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
entries = ["Home Assistant","Nginx Proxy Manager","Portainer","Roundcube","Authentik","Kestra"] entries = ["Home Assistant","Nginx Proxy Manager","Portainer","Roundcube","Authentik","Kestra"]
for e in entries: for e in entries:
cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"SELECT url FROM items WHERE title = '{e}'\"" cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"SELECT url FROM items WHERE title = '{e}'\""
print(cmnd) logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
regex = re.compile(r'[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}') regex = re.compile(r'[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}')
contents = re.sub(regex, IP , output) contents = re.sub(regex, IP , output)
cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"UPDATE items SET url = '{contents}' WHERE title = '{e}'\"" cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"UPDATE items SET url = '{contents}' WHERE title = '{e}'\""
print(cmnd) logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
cmnd = "docker start heimdall"
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
# cmnd = "docker start heimdall"
# status, output = subprocess.getstatusoutput(cmnd)
if app == "ha": if app == "ha":
print("Stopping docker") logging.info("Stopping docker")
cmnd = "docker stop heimdall" cmnd = "docker stop heimdall"
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"rsync -avz --delete rsync://{BACKUP_HOST}{NEW_BACKUP_DIR} {SOURCE_DIR}" cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y" ans = "y"
print(cmnd) logging.info(cmnd)
print("Sync files") logging.info("Sync files")
if _TEST: if _TEST:
ans = input("continue?") or "n" ans = input("continue?") or "n"
if ans == "y" and _EXECUTE: if ans == "y" and _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
print("Start docker") logging.info("Start docker")
cmnd = "docker start heimdall" # cmnd = "docker start heimdall"
status, output = subprocess.getstatusoutput(cmnd) # status, output = subprocess.getstatusoutput(cmnd)
if app == "fail2ban": elif app == "fail2ban":
print("Stopping docker") logging.info("Stopping docker")
cmnd = f"rsync -avz --delete rsync://{BACKUP_HOST}{NEW_BACKUP_DIR} {SOURCE_DIR}" cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y" ans = "y"
print(cmnd) logging.info(cmnd)
print("Sync files") logging.info("Sync files")
if _TEST: if _TEST:
ans = input("continue?") or "n" ans = input("continue?") or "n"
if ans == "y" and _EXECUTE: if ans == "y" and _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
print("Start docker") logging.info("Start docker")
cmnd = "docker start heimdall" # cmnd = "docker start heimdall"
# status, output = subprocess.getstatusoutput(cmnd)
elif app == "homepage":
logging.info("Stopping docker")
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
logging.info(cmnd)
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
elif app == "nginx":
print("Stopping docker") file = "/share/docker_data/homepage/config/widgets.yaml"
with open(file, 'r') as stream:
try:
loaded = yaml.load(stream, Loader=yaml.FullLoader)
except yaml.YAMLError as exc:
logging.info(exc)
# Modify the fields from the dict
#loaded['logo']['icon'] = "/images/morefine2.png"
logging.info(json.dumps(loaded, indent=2))
i = 0
for y in loaded:
logging.info(i)
logging.info(y)
if "logo" in y:
if host == "rpi5.home.lan":
loaded[i]['logo']['icon'] = "/images/rpi5.png"
elif host == "nas.home.lan":
loaded[i]['logo']['icon'] = "/images/qnap_nas.png"
else:
loaded[i]['logo']['icon'] = "/images/morefine2.png"
i+=1
# Save it again
logging.info(f"writing to file {file}")
with open(file, 'w') as stream:
try:
yaml.dump(loaded, stream, default_flow_style=False)
except yaml.YAMLError as exc:
print("failed")
print(exc)
logging.info("Start docker")
# cmnd = "docker start heimdall"
# status, output = subprocess.getstatusoutput(cmnd)
elif app == "nginx1":
logging.info("Stopping docker")
cmnd = "docker stop nginx-app-1" cmnd = "docker stop nginx-app-1"
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"rsync -avz --delete rsync://{BACKUP_HOST}{NEW_BACKUP_DIR} {SOURCE_DIR}" cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y" ans = "y"
print(cmnd) logging.info(cmnd)
print("Sync files") logging.info("Sync files")
if _TEST: if _TEST:
ans = input("continue?") or "n" ans = input("continue?") or "n"
if ans == "y" and _EXECUTE: if ans == "y" and _EXECUTE:
@ -281,151 +411,191 @@ if _RESTORE:
domains = ["sectorq.eu","gitlab.sectorq.eu","ha.sectorq.eu","mail.sectorq.eu","pw.sectorq.eu","semaphore.sectorq.eu","kestra.sectorq.eu","auth.sectorq.eu"] domains = ["sectorq.eu","gitlab.sectorq.eu","ha.sectorq.eu","mail.sectorq.eu","pw.sectorq.eu","semaphore.sectorq.eu","kestra.sectorq.eu","auth.sectorq.eu"]
for d in domains: for d in domains:
cmnd = f'sqlite3 /share/docker_data/nginx/data/database.sqlite "UPDATE proxy_host SET forward_host = \'{IP}\' WHERE domain_names = \'[\\"{d}\\"]\'"' cmnd = f'sqlite3 /share/docker_data/nginx/data/database.sqlite "UPDATE proxy_host SET forward_host = \'{IP}\' WHERE domain_names = \'[\\"{d}\\"]\'"'
print(cmnd) logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
cmnd = 'egrep -l "# kestra.sectorq.eu|# auth.sectorq.eu|# ha.sectorq.eu|# pw.sectorq.eu|# semaphore.sectorq.eu|# sectorq.eu|# gitlab.sectorq.eu|# ha.sectorq.eu" /share/docker_data/nginx/data/nginx/proxy_host/*' cmnd = 'egrep -l "# bazarr.sectorq.eu|# gitea.sectorq.eu|# jf.sectorq.eu|# kestra.sectorq.eu|# auth.sectorq.eu|# ha.sectorq.eu|# pw.sectorq.eu|# semaphore.sectorq.eu|# sectorq.eu|# gitlab.sectorq.eu|# ha.sectorq.eu" /share/docker_data/nginx/data/nginx/proxy_host/*'
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
print(output.splitlines()) logging.info(output.splitlines())
for file in output.splitlines(): for file in output.splitlines():
print(file) logging.info(file)
f = open(file) f = open(file)
contents = f.read() contents = f.read()
f.close() f.close()
regex = re.compile(r'\n\s+set \$server\s+\"\w+.\w+.\w+.\w+\";') regex = re.compile(r'\n\s+set \$server\s+\"\w+.\w+.\w+.\w+\";')
contents = re.sub(regex, f'\n set $server \"{IP}\";', contents) contents = re.sub(regex, f'\n set $server \"{IP}\";', contents)
#print(contents) #print(contents)
print(regex) logging.info(regex)
f = open(file, "w") f = open(file, "w")
contents = f.write(contents) contents = f.write(contents)
f.close() f.close()
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
print("Starting docker") logging.info("Starting docker")
cmnd = "docker start nginx-app-1" # cmnd = "docker start nginx-app-1"
status, output = subprocess.getstatusoutput(cmnd) # status, output = subprocess.getstatusoutput(cmnd)
else: else:
cmnd = f"rsync -avz --delete rsync://{BACKUP_HOST}{NEW_BACKUP_DIR} {SOURCE_DIR}" cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y" ans = "y"
print(cmnd) logging.info(cmnd)
print("Sync files") logging.info("Sync files")
if _TEST: if _TEST:
ans = input("continue?") or "n" ans = input("continue?") or "n"
if ans == "y" and _EXECUTE: if ans == "y" and _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
now = datetime.datetime.now()
ENDTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
msg = {"status":"finished","bak_name":app,"start_time":DATETIME,"end_time":ENDTIME,"progress":0}
client.connect(broker,1883,10)
client.publish(topic, json.dumps(msg))
client.disconnect()
now = datetime.datetime.now() now = datetime.datetime.now()
ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S") ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S")
print("Sending finished status") logging.info("Sending finished status")
msg = {"mode":_MODE,"status":"finished","bak_name":"complete","start_time":STARTTIME,"end_time":ENDJOB,"progress":0,"used_space":"?"}
print(msg)
client.connect(broker,1883,10)
client.publish(topic_sum, json.dumps(msg))
client.disconnect()
msg = {"mode":_MODE, "status":"restore","bak_name":"Restore","host":host,"cur_job":app,"start_time":STARTTIME,"end_time":"","progress":100,"finished":ENDJOB,"used_space":1}
logging.info(msg)
send_mqtt_message(msg)
if _MODE == "auto": if _MODE == "auto":
cmnd = "ssh root@omv.home.lan 'systemctl suspend &'" cmnd = "ssh root@amd.home.lan 'systemctl suspend &'"
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
if _BACKUP: if _BACKUP:
last = 1
while True: while True:
directory = '/backups/' directory = '/backups/'
count = len(fnmatch.filter(os.listdir(directory), '*.*')) count = len(fnmatch.filter(os.listdir(directory), '*'))
print('File Count:', count) if last != count:
logging.info(f'File Count: {count}')
last = count
if count == 0:
time.sleep(10) time.sleep(10)
continue continue
else:
finished = []
now = datetime.datetime.now()
STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
topic = "sectorq/amd/backups"
msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":"","cur_job":"","start_time":STARTTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
send_mqtt_message(msg)
# iterate over files in # iterate over files in
# that directory # that directory
for filename in os.scandir(directory): for filename in os.scandir(directory):
if filename.is_file(): if filename.is_file():
logging.info(filename.path)
logging.info(filename.name)
host = filename.name
logging.info("Backup")
for b in backups[host]["jobs"]:
print(filename.path) if not backups[host]["jobs"][b]["active"]:
print("Backup") logging.info("Backup {} is not active!".format(b))
for b in backups[host]:
topic = "sectorq/omv/backups/{}".format(b.lower())
if not backups[host][b]["active"]:
print("Backup {} is not active!".format(b))
client.connect(broker,1883,60)
msg = {"status":"inactive","bak_name":b,"start_time":"inactive","end_time":"inactive","progress":0} msg = {"status":"inactive","bak_name":b,"start_time":"inactive","end_time":"inactive","progress":0}
send_mqtt_message(msg)
client.publish(topic, json.dumps(msg))
client.disconnect()
continue continue
SOURCE_DIR = backups[host][b]["source"] SOURCE_DIR = backups[host]["jobs"][b]["source"]
now = datetime.datetime.now() now = datetime.datetime.now()
BACKUP_HOST = "root@omv.home.lan" BACKUP_HOST = backups[host]["login"]
BACKUP_DEVICE = "/srv/dev-disk-by-uuid-2f843500-95b6-43b0-bea1-9b67032989b8" BACKUP_DEVICE = "/mnt/raid"
BACKUP_DIR = f"/backup/{host}/{b}" BACKUP_DIR = f"{BACKUP_HOST}:{SOURCE_DIR}"
NEW_BACKUP_DIR = f"{BACKUP_DEVICE}/backup/{host}/{b}" BACKUP_ROOT = f"{BACKUP_DEVICE}/backup/{host}/{b}"
DATETIME = now.strftime("%Y-%m-%d_%H-%M-%S") DATETIME = now.strftime("%Y-%m-%d_%H-%M-%S")
if _FIRST: if _FIRST:
BACKUP_PATH="{}/initial".format(BACKUP_DIR) NEW_BACKUP_DIR = f"{BACKUP_ROOT}/initial"
else: else:
BACKUP_PATH="{}/{}".format(BACKUP_DIR, DATETIME) NEW_BACKUP_DIR = f"{BACKUP_ROOT}/{DATETIME}_running"
LATEST_LINK="{}/latest".format(BACKUP_DIR)
FULL_BACKUP_LATEST = f"{NEW_BACKUP_DIR}/latest"
LATEST_LINK = f"/{host}/{b}/latest"
FULL_BACKUP_LATEST = f"{BACKUP_ROOT}/latest"
msg = {"status":"started","bak_name":b,"start_time":DATETIME,"end_time":"in progress", "progress":0} # msg = {"status":"started","bak_name":b,"start_time":DATETIME,"end_time":"in progress", "progress":0}
msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"start_time":STARTTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
client.connect(broker,1883,60) client.connect(broker,1883,60)
client.publish(topic, json.dumps(msg)) client.publish(topic, json.dumps(msg),qos=0, retain=True)
client.disconnect() client.disconnect()
cmnd = "ssh root@omv.home.lan 'mkdir -p " + NEW_BACKUP_DIR + "'"
cmnd = "mkdir -p " + NEW_BACKUP_DIR
logging.info(cmnd)
if _EXECUTE: if _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
print("Create backup dir") logging.info(output)
print(cmnd) logging.info(status)
logging.info("Create backup dir")
#cmnd = "rsync -av --delete {}/ --link-dest {} --exclude=\".cache\" {}".format(SOURCE_DIR, LATEST_LINK, BACKUP_PATH)
if _FIRST:
cmnd = f"rsync -avz --delete {SOURCE_DIR} --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" rsync://{BACKUP_HOST}{BACKUP_PATH}"
else:
cmnd = f"rsync -avz --delete {SOURCE_DIR} --link-dest {LATEST_LINK} --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" rsync://{BACKUP_HOST}{BACKUP_PATH}"
ans = "y"
print(cmnd)
print("Sync files")
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
# rsync --info=progress2 -avz --delete /share/docker_data/ --link-dest /m-server/docker_data/latest --exclude="gitlab/data/" --exclude="esphome/config/.esphome" --exclude="gitlab/logs/prometheus" --exclude=".cache" --exclude=".git" --exclude="var_lib_motioneye" /m-server/m-server/docker_data/newone1
# input("????")
cmnd = f"ssh {BACKUP_HOST} 'ls {SOURCE_DIR}'"
logger.debug(cmnd)
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
logger.debug(output)
apps = output.splitlines()
c = len(apps)
print(apps)
print(len(apps))
cmnd = f"ssh root@omv.home.lan 'rm -rf {FULL_BACKUP_LATEST}'" step = round(100 / c,1)
progress = 0
#cmd = f"rsync -avz --delete {BACKUP_DIR} --link-dest {FULL_BACKUP_LATEST}/ --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" {NEW_BACKUP_DIR}"
#cmd = [ 'rsync', '-avz','--info=progress2', BACKUP_DIR , NEW_BACKUP_DIR]
#print(cmnd) cmd = ['rsync', '-avz', '--delete', BACKUP_DIR, '--link-dest', FULL_BACKUP_LATEST, '--exclude="jellyfin/cache/transcodes"', '--exclude=".@__thumb/"', '--exclude="gitlab/logs/prometheus"', '--exclude="home-assistant.log"', '--exclude="gitlab/logs/*"', '--exclude="esphome/config/.esphome"', '--exclude=".cache"', '--exclude=".git"', '--exclude="var_lib_motioneye"', NEW_BACKUP_DIR]
print("Removing latest link") logging.info(" ".join(cmd))
process = subprocess.Popen(cmd,
stdout=subprocess.PIPE)
while process.poll() is None:
line = process.stdout.readline().decode("utf-8").split("/")
print(line[0])
if line[0] in apps:
logging.info(f"Working on app {line[0]}")
while True:
if line[0] != apps[0]:
del apps[0]
progress = progress + step
else:
break
apps.remove(line[0])
#print(len(apps))
topic = "sectorq/amd/backups"
msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"sub":line[0],"start_time":STARTTIME,"end_time":"in progress","progress":str(round(progress)) + "%","finished":",".join(finished)}
send_mqtt_message(msg)
progress = progress + step
# input(apps)
# for a in apps:
# logging.info(f"App {a}")
# topic = "sectorq/amd/backups"
# msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"sub":a,"start_time":STARTTIME,"end_time":"in progress","progress":round(progress),"finished":",".join(finished)}
# send_mqtt_message(msg)
# logger.debug(cmnd)
# if _FIRST:
# cmnd = f"rsync -avz --delete {SOURCE_DIR} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" rsync://{BACKUP_HOST}{BACKUP_PATH}"
# else:
# cmnd = f"rsync -avz --delete {BACKUP_DIR}{a} --link-dest {FULL_BACKUP_LATEST}/{a} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" {NEW_BACKUP_DIR}"
# ans = "y"
# logging.info(cmnd)
# logging.info("Sync files1")
# #input("??????")
# if _TEST:
# ans = input("continue?") or "n"
# if ans == "y" and _EXECUTE:
# status, output = subprocess.getstatusoutput(cmnd)
# #proc = subprocess.Popen(cmnd,stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.PIPE,cwd = "/myapps/",shell=True)
# progress = progress + step
# topic = "sectorq/amd/backups"
# msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"sub":a,"start_time":STARTTIME,"end_time":"in progress","progress":round(progress),"finished":",".join(finished)}
# send_mqtt_message(msg)
cmnd = f"rm -rf {FULL_BACKUP_LATEST}"
logging.info(cmnd)
logging.info("Removing latest link")
# input("????") # input("????")
if _EXECUTE: if _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
if _FIRST: if _FIRST:
cmnd = f"ssh root@omv.home.lan 'cd {NEW_BACKUP_DIR}; ln -s initial latest'" cmnd = f"cd {BACKUP_ROOT}; ln -s initial latest"
else: else:
cmnd = f"ssh root@omv.home.lan 'cd {NEW_BACKUP_DIR}; ln -s {DATETIME} latest'" cmnd = f"cd {BACKUP_ROOT}; mv {DATETIME}_running {DATETIME};ln -s {DATETIME} latest"
print("Creating new latest link") logging.info("Creating new latest link")
#print(cmnd) #print(cmnd)
# input("????") # input("????")
if _EXECUTE: if _EXECUTE:
# --- Prune old backup directories -------------------------------------------
logging.info("Removing old dirs")
# BUG FIX: the original command was "cd {BACKUP_ROOT} find ..." — without the
# ";" the shell passes "find" and its arguments to cd, so pruning never ran.
# NOTE(review): "-mmin +30" removes directories older than 30 MINUTES; the
# commented-out predecessor used "-mtime +30" (30 days) — confirm intent.
cmnd = f"cd {BACKUP_ROOT}; find ./ -maxdepth 1 -type d -mmin +30 -exec rm -rf {{}} \\;"
if _EXECUTE:
    status, output = subprocess.getstatusoutput(cmnd)
now = datetime.datetime.now()
ENDTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
finished.append(b)
# Per-job "finished" notification for the MQTT dashboard.
msg = {"mode":_MODE, "status":"finished","bak_name":"complete","host":host,"cur_job":b,"start_time":ENDTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
send_mqtt_message(msg)
# --- Report final status incl. disk usage of the backup filesystem ----------
logging.info("Getting size of FS")
cmnd = "df -h /mnt/raid|awk '{ print $3 }'|tail -1"
logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
# Guard against an empty df/awk result (e.g. mount missing) instead of
# crashing with IndexError on output.split()[0].
parts = output.split()
used_space = parts[0] if parts else "unknown"
now = datetime.datetime.now()
ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S")
logging.info("Size : {}".format(used_space))
logging.info("Sending finished status")
# Summary message for the whole run (all jobs finished).
msg = {"mode":_MODE, "status":"finished","bak_name":"complete","host":host,"cur_job":b,"start_time":STARTTIME,"end_time":ENDTIME,"progress":0,"finished":",".join(finished),"used_space":used_space}
logging.info(msg)
send_mqtt_message(msg)
# Remove the job marker file for this run.
# NOTE(review): "filename.path" suggests an os.DirEntry from an earlier
# scandir loop — confirm it is still in scope at this point.
os.remove(filename.path)
# --- Trigger a restore run on every backup client over SSH -------------------
topic = "sectorq/amd/restore"
for s in servers:
    if s == "m-server.home.lan":
        # The backup server itself is never restored from here.
        continue
    elif s == "nas.home.lan":
        # QNAP box: no sudo, dedicated venv interpreter.
        user = "admin"
        cmnd = "/share/Data/__GITLAB/omv_backup/venv/bin/python3 /share/Data/__GITLAB/omv_backup/omv_backup.py -r all"
    else:
        user = "root"
        cmnd = "sudo /myapps/omv_backup.py -r all"
    msg = {"mode":_MODE, "status":"restore","bak_name":"s","host":s,"cur_job":"aaa","start_time":1,"end_time":1,"progress":0,"finished":1,"used_space":1}
    logging.info(msg)
    send_mqtt_message(msg)
    if is_port_open(s,22):
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        # FIX: set_missing_host_key_policy was called twice; once is enough.
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        # NOTE(review): mixes `logger` and `logging` — confirm `logger` is
        # defined at module level.
        logger.info(f"Sync {s}")
        print(f"Sync {s}")
        pkey = paramiko.RSAKey.from_private_key_file("/home/jd/.ssh/id_rsa")
        ssh.connect(s,
                    username=user,
                    look_for_keys=False,
                    allow_agent=False,
                    pkey=pkey)
        print(cmnd)
        try:
            # Stream remote stdout/stderr line by line into the log.
            ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(cmnd)
            for line in iter(ssh_stdout.readline, ""):
                logger.info(line)
                print(line, end="")
            for line in iter(ssh_stderr.readline, ""):
                logger.info(line)
        finally:
            # ROBUSTNESS: close the connection even if exec/read fails.
            ssh.close()
# if _MODE == "auto":
# hostup = True
# cmnd = "ssh root@omv.home.lan 'systemctl suspend &'"
# status, output = subprocess.getstatusoutput(cmnd)
# while hostup:
# #HOST_UP = os.system(f"ping -c 1 -w 2 omv.home.lan") == 0
# cmnd = f"ping -c 1 -w 2 {BACKUP_HOST}"
# status, output = subprocess.getstatusoutput(cmnd)
# # print(status)
# # print(output)
# if status == 0:
# print(f"Backup host up, waiting - {n}\r", end="")
# time.sleep(5)
# n += 1
# else:
# print("Backup host down " )
# hostup = False
try: # try:
url = "http://m-server.home.lan:8123/api/webhook/-0eWYFhSTzdusAO8jwQS9t1AT?mode=off" # url = "http://m-server.home.lan:8123/api/webhook/-0eWYFhSTzdusAO8jwQS9t1AT?mode=off"
x = requests.post(url) # x = requests.post(url)
print(x.text) # print(x.text)
except: # except:
pass # pass
if _SSH_TEST:
    # Connectivity smoke test: run a harmless command ("ls -la") on the
    # backup client(s) over SSH and publish a status message via MQTT.
    user = "root"
    cmnd = "ls -la"
    topic = "sectorq/amd/backups"
    for s in servers:
        msg = {"mode":_MODE, "status":"restore","bak_name":"s","host":s,"cur_job":"aaa","start_time":1,"end_time":1,"progress":0,"finished":1,"used_space":1}
        logging.info(msg)
        send_mqtt_message(msg)
        if s != "rack.home.lan":
            # Only rack.home.lan is exercised by this test for now.
            continue
        if is_port_open(s,22):
            ssh = paramiko.SSHClient()
            ssh.load_system_host_keys()
            # FIX: set_missing_host_key_policy was called twice; once is enough.
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            # NOTE(review): mixes `logger` and `logging` — confirm `logger`
            # is defined at module level.
            logger.info(f"Sync {s}")
            print(f"Sync {s}")
            pkey = paramiko.RSAKey.from_private_key_file("/home/jd/.ssh/id_rsa")
            ssh.connect(s,
                        username=user,
                        look_for_keys=False,
                        allow_agent=False,
                        pkey=pkey)
            print(cmnd)
            try:
                # Stream remote stdout/stderr line by line into the log.
                ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(cmnd)
                for line in iter(ssh_stdout.readline, ""):
                    logger.info(line)
                    print(line, end="")
                for line in iter(ssh_stderr.readline, ""):
                    logger.info(line)
            finally:
                # ROBUSTNESS: close the connection even if exec/read fails.
                ssh.close()