2025-09-01 12:47:25 +02:00
parent b536c8ecb1
commit bbe4d72666


@@ -478,12 +478,20 @@ def restore_job(_APP):
cmnd = "ssh root@amd.home.lan 'systemctl suspend &'" cmnd = "ssh root@amd.home.lan 'systemctl suspend &'"
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
def backup_job(server): def backup_job(pl):
client2 = mqtt.Client() client2 = mqtt.Client()
client2.username_pw_set("jaydee", "jaydee1") client2.username_pw_set("jaydee", "jaydee1")
client2.connect("mqtt.home.lan",1883,60) client2.connect("mqtt.home.lan",1883,60)
logging.info(f'starting backup job') logging.info(f'starting backup job')
server = pl["host"]
if pl["mode"] == "dry":
_DRYRUN = True
logging.info("Dry run active")
else:
_DRYRUN = False
logging.info("Full mode active")
finished = [] finished = []
now = datetime.datetime.now() now = datetime.datetime.now()
STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S") STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
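
With this change backup_job() receives the parsed MQTT payload instead of a bare hostname: the target comes from pl["host"] and pl["mode"] == "dry" switches the job into dry-run mode. A minimal sketch of publishing such a payload with paho-mqtt; the broker address and credentials are the ones visible in this diff, but the command topic is an assumption, since the subscribe call is not part of this hunk:

# Sketch: send a backup request that the new handle_payload() can parse.
# The topic name is an assumption; only the broker and credentials appear in this diff.
import json
import paho.mqtt.publish as publish

payload = {"host": "m-server", "mode": "dry"}   # any mode other than "dry" runs a full backup

publish.single(
    "sectorq/backups/start",                    # assumed command topic
    json.dumps(payload),
    hostname="mqtt.home.lan",
    port=1883,
    auth={"username": "jaydee", "password": "jaydee1"},
)
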
@@ -554,26 +562,26 @@ def backup_job(server):
         progress = 0
         cmd = ['rsync', '-avz', '--delete', BACKUP_DIR, '--link-dest', FULL_BACKUP_LATEST, '--exclude-from=/myapps/exclude.txt', NEW_BACKUP_DIR]
         logging.info(" ".join(cmd))
-        process = subprocess.Popen(cmd,
-            stdout=subprocess.PIPE)
+        if not _DRYRUN:
+            process = subprocess.Popen(cmd,
+                stdout=subprocess.PIPE)
         while process.poll() is None:
             line = process.stdout.readline().decode("utf-8").split("/")
             print(line[0])
             if line[0] in apps:
                 logging.info(f"Working on app {line[0]}")
                 while True:
                     if line[0] != apps[0]:
                         del apps[0]
                         progress = progress + step
                     else:
                         break
                 apps.remove(line[0])
                 #print(len(apps))
                 topic = "sectorq/amd/backups"
                 msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"sub":line[0],"start_time":STARTTIME,"end_time":"in progress","progress":str(round(progress)) + "%","finished":",".join(finished)}
                 client2.publish(topic, json.dumps(msg),qos=0, retain=False)
                 progress = progress + step
         cmnd = f"rm -rf {FULL_BACKUP_LATEST}"
@@ -589,7 +597,7 @@ def backup_job(server):
logging.info("Creating new latest link") logging.info("Creating new latest link")
#print(cmnd) #print(cmnd)
# input("????") # input("????")
if _EXECUTE: if not _DRYRUN:
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
#Remove old #Remove old
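
From here on every shell command is gated on not _DRYRUN instead of the old _EXECUTE flag. A small helper that captures this repeated guard, shown only as a sketch; run_cmd() is hypothetical and not part of the repository:

# Sketch: one place for the "skip shell commands on dry runs" guard.
# run_cmd() is a hypothetical helper, not defined in this repository.
import logging
import subprocess

def run_cmd(cmnd, dry_run):
    logging.info(cmnd)
    if dry_run:
        return 0, ""                  # pretend success, touch nothing
    return subprocess.getstatusoutput(cmnd)

# Usage inside backup_job(), for example:
# status, output = run_cmd(f"rm -rf {FULL_BACKUP_LATEST}", _DRYRUN)
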
@@ -597,7 +605,7 @@ def backup_job(server):
cmnd = f"ls {BACKUP_ROOT}" cmnd = f"ls {BACKUP_ROOT}"
if _EXECUTE: if not _DRYRUN:
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
for f in output.splitlines(): for f in output.splitlines():
pattern = r"^[0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}-[0-9]{2}-[0-9]{2}$" # regex pattern: string starts with 'abc' pattern = r"^[0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}-[0-9]{2}-[0-9]{2}$" # regex pattern: string starts with 'abc'
@@ -619,7 +627,7 @@ def backup_job(server):
cmnd = f"ls {BACKUP_ROOT}|grep _running" cmnd = f"ls {BACKUP_ROOT}|grep _running"
logging.info(f"removing obsolete dirs") logging.info(f"removing obsolete dirs")
if _EXECUTE: if not _DRYRUN:
status, output = subprocess.getstatusoutput(cmnd) status, output = subprocess.getstatusoutput(cmnd)
for f in output.splitlines(): for f in output.splitlines():
dir_path = f"{BACKUP_ROOT}/{f}" dir_path = f"{BACKUP_ROOT}/{f}"
@@ -650,11 +658,13 @@ def backup_job(server):
topic = "sectorq/backups/start" topic = "sectorq/backups/start"
logging.info(f"LALA : {topic}") logging.info(f"LALA : {topic}")
client2.publish(topic, "finished",qos=0, retain=True) client2.publish(topic, "finished",qos=0, retain=True)
time.sleep(1)
client2.publish(topic, "finished2",qos=0, retain=True) client2.publish(topic, "finished2",qos=0, retain=True)
client2.disconnect() client2.disconnect()
#return "finished" #return "finished"
if _DRYRUN:
return
topic = "sectorq/amd/restore" topic = "sectorq/amd/restore"
for s in servers: for s in servers:
logging.info(f"Restoring {s}") logging.info(f"Restoring {s}")
@@ -752,20 +762,21 @@ def handle_payload(payload):
         pl = json.loads(payload)
     except:
         pl = payload
-    logging.info(pl)
-    if payload == 'm-server':
-        logging.info("💡 Starting backup job")
-        backup_job(payload)
-        logging.info(f"💡 Finished backup job")
-    elif payload == 'nas':
-        logging.info("💡 Starting backup job")
-        backup_job(payload)
-        logging.info(f"💡 Finished backup job")
+    logging.debug(pl)
+    return
+    if "host" in pl:
+        if pl["host"] == 'm-server':
+            logging.info("💡 Starting backup job")
+            backup_job(pl)
+            logging.info(f"💡 Finished backup job")
+        elif pl["host"] == 'nas':
+            logging.info("💡 Starting backup job")
+            backup_job(pl)
+            logging.info(f"💡 Finished backup job")
+        else:
+            logging.error(f"⚠️ Unknown command: {pl}")
     else:
-        logging.error(f"⚠️ Unknown command: {payload}")
+        logging.error(f"⚠️ Wrong payload: {pl}")
 # Callback when connected
 def on_connect(client, userdata, flags, rc):
     if rc == 0:
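
For context, the handler above is normally driven by paho-mqtt callbacks: on_connect subscribes once rc == 0, and on_message hands the raw payload to handle_payload(). A hedged sketch of that wiring; the topic name is an assumption, since the subscribe call itself is outside this diff:

# Sketch: typical callback wiring around handle_payload(); the topic is assumed.
import paho.mqtt.client as mqtt

def on_connect(client, userdata, flags, rc):
    if rc == 0:
        client.subscribe("sectorq/backups/start")   # assumed command topic
    else:
        print(f"connect failed with rc={rc}")

def on_message(client, userdata, msg):
    handle_payload(msg.payload.decode())            # handle_payload() from this file

client = mqtt.Client()
client.username_pw_set("jaydee", "jaydee1")
client.on_connect = on_connect
client.on_message = on_message
client.connect("mqtt.home.lan", 1883, 60)
client.loop_forever()
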