Compare commits

...

130 Commits

Author SHA1 Message Date
1ecedd7c00 build 2025-06-19 17:22:26 +02:00
87c671d23b build 2025-06-19 13:16:04 +02:00
97297f0c4e build 2025-06-19 12:55:12 +02:00
58e2596d3d build 2025-06-19 12:51:55 +02:00
ff736e20f1 build 2025-06-19 12:46:06 +02:00
94f5a08920 build 2025-06-19 12:20:01 +02:00
3f7a77b7b0 build 2025-05-26 19:23:19 +02:00
687ffc828d build 2025-05-26 07:40:12 +02:00
f17f1ca372 build 2025-05-26 07:39:42 +02:00
d5b114e771 build 2025-05-26 07:37:06 +02:00
edb3091b2a build 2025-05-24 20:25:59 +02:00
10edb2b533 Merge branch 'main' of gitlab.sectorq.eu:jaydee/omv_backup 2025-05-24 20:25:39 +02:00
3bffbd872d build 2025-05-24 20:25:35 +02:00
af5d64200c Update .gitlab-ci.yml file 2025-05-24 20:25:24 +02:00
95dd8fb52b build 2025-05-24 20:24:15 +02:00
45476954cc build 2025-05-24 20:23:26 +02:00
386e3ec75c Merge branch 'main' of gitlab.sectorq.eu:jaydee/omv_backup 2025-05-24 20:22:53 +02:00
854f0c828a build 2025-05-24 20:22:45 +02:00
06c776ee90 Update .gitlab-ci.yml file 2025-05-24 20:22:37 +02:00
71d661dfb5 build 2025-05-24 20:16:00 +02:00
ca518405ff build 2025-05-24 20:05:33 +02:00
a1b4aff656 build 2025-05-24 19:37:01 +02:00
b40a7f795e build 2025-05-24 19:32:21 +02:00
50d9f18969 Merge branch 'main' of gitlab.sectorq.eu:jaydee/omv_backup 2025-05-24 19:31:39 +02:00
baa26b3a09 build 2025-05-24 19:31:33 +02:00
eff8120428 Update .gitlab-ci.yml file 2025-05-24 19:30:58 +02:00
25151a5776 build 2025-05-24 19:29:55 +02:00
82b9f93e13 build 2025-05-24 19:27:30 +02:00
6fa3c016b2 Merge branch 'main' of gitlab.sectorq.eu:jaydee/omv_backup 2025-05-24 19:26:45 +02:00
23d32537c7 build 2025-05-24 19:26:39 +02:00
607f58ae0a buil 2025-05-24 19:26:29 +02:00
dce4ccb711 Update .gitlab-ci.yml file 2025-05-24 19:24:53 +02:00
b66a4598de buil 2025-05-24 19:20:34 +02:00
3d2d8e5c3d build 2025-05-24 19:19:05 +02:00
b12359c373 Update .gitlab-ci.yml file 2025-05-24 19:18:44 +02:00
cc698871f6 build 2025-05-24 19:17:21 +02:00
66867ae717 build 2025-05-24 19:16:19 +02:00
9554f194ac build 2025-05-24 19:15:18 +02:00
4ea82d55a8 build 2025-05-24 19:13:53 +02:00
d855defac0 build 2025-05-24 19:13:25 +02:00
a90c1d5589 Update .gitlab-ci.yml file 2025-05-24 19:13:13 +02:00
5cae7332d5 build 2025-05-24 19:11:47 +02:00
e17d538c2b Update .gitlab-ci.yml file 2025-05-24 19:11:29 +02:00
f9266b0bc9 Update .gitlab-ci.yml file 2025-05-24 19:10:53 +02:00
7909eadad5 build 2025-05-24 19:09:37 +02:00
c5a71ef749 Update .gitlab-ci.yml file 2025-05-24 19:09:28 +02:00
7a3da9ee7d Merge branch 'main' of gitlab.sectorq.eu:jaydee/omv_backup 2025-05-24 19:09:04 +02:00
ab5ba0e10d build 2025-05-24 19:09:01 +02:00
db3d4245d9 Update .gitlab-ci.yml file 2025-05-24 19:08:50 +02:00
6b522f6cbc build 2025-05-24 19:08:04 +02:00
39d16f6f2c Merge branch 'main' of gitlab.sectorq.eu:jaydee/omv_backup 2025-05-24 19:07:18 +02:00
5ed0031208 build 2025-05-24 19:07:16 +02:00
fded5b677c Update .gitlab-ci.yml file 2025-05-24 19:07:05 +02:00
67b73319e0 build 2025-05-24 19:05:53 +02:00
9daa539028 Update .gitlab-ci.yml file 2025-05-24 19:05:19 +02:00
f22b99030b Update .gitlab-ci.yml file 2025-05-24 19:04:47 +02:00
ac2d7d212e Merge branch 'main' of gitlab.sectorq.eu:jaydee/omv_backup 2025-05-24 19:01:46 +02:00
6a52d29803 build 2025-05-24 19:01:38 +02:00
fd99f4ef78 Update .gitlab-ci.yml file 2025-05-24 19:01:29 +02:00
daeb2901f2 build 2025-05-24 19:00:15 +02:00
59f6d1669b added v3 2025-05-24 18:59:45 +02:00
ca9a2d0969 added v3 2025-05-24 18:58:53 +02:00
6ff4833e0b added v3 2025-05-24 18:57:33 +02:00
506e4cc7df added v3 2025-05-24 18:57:07 +02:00
ccf4d16c55 added v3 2025-05-24 18:56:51 +02:00
6acf13de4e added v3 2025-05-24 18:54:50 +02:00
e9b1c18bb7 added v3 2025-05-24 18:52:54 +02:00
6f70dbc83c Update .gitlab-ci.yml file 2025-05-24 18:52:40 +02:00
2951d204a6 Update .gitlab-ci.yml file 2025-05-24 18:48:58 +02:00
15e1275c5a Update .gitlab-ci.yml file 2025-05-24 18:45:30 +02:00
c636e1f2e5 Update .gitlab-ci.yml file 2025-05-24 18:44:47 +02:00
97ff1a3197 added v3 2025-05-24 18:43:06 +02:00
6bd7d5dde6 Update .gitlab-ci.yml file 2025-05-21 13:47:56 +02:00
bfefa386a5 Update .gitlab-ci.yml file 2025-05-21 13:46:50 +02:00
afe5faae8a Update .gitlab-ci.yml file 2025-05-21 13:45:23 +02:00
047001a93b renamed customer user group 2025-05-21 13:41:29 +02:00
8f860f1180 Update .gitlab-ci.yml file 2025-05-21 13:38:54 +02:00
58c9816677 Update .gitlab-ci.yml file 2025-05-21 13:38:22 +02:00
665b0b36b4 build 2025-05-20 13:46:00 +02:00
987d04fc86 build 2025-05-20 13:44:18 +02:00
94bab11c58 build 2025-05-20 13:43:26 +02:00
69b2ab1920 build 2025-05-20 13:42:50 +02:00
120a2127c4 bui11 2025-05-20 13:42:26 +02:00
341edd4399 bui11 2025-05-20 13:38:06 +02:00
0797e69619 bui11 2025-05-20 13:35:51 +02:00
19d039ec13 bui11 2025-05-20 13:34:02 +02:00
f2d6f3f391 Merge branch 'main' of gitlab.sectorq.eu:jaydee/omv_backup 2025-05-20 13:30:01 +02:00
0307bf4aff bui11 2025-05-20 13:29:58 +02:00
da25006c74 Delete .gitlab-ci.yml 2025-05-20 13:29:33 +02:00
cded28f0cc Update .gitlab-ci.yml file 2025-05-20 13:25:19 +02:00
4134d56c98 bui11 2025-05-20 13:24:53 +02:00
99e111d089 Update .gitlab-ci.yml file 2025-05-20 13:23:01 +02:00
1e0ee17dda bui11 2025-05-20 13:17:00 +02:00
59b8436596 bui11 2025-05-20 13:15:07 +02:00
9d07b227fb Update .gitlab-ci.yml file 2025-05-20 13:14:50 +02:00
a42a9ac8b7 Merge branch 'main' of gitlab.sectorq.eu:jaydee/omv_backup 2025-05-20 13:14:29 +02:00
c4c8f6421a bui11 2025-05-20 13:14:22 +02:00
62c235ff4c Update .gitlab-ci.yml file 2025-05-20 13:13:15 +02:00
459a58cdc7 Update .gitlab-ci.yml file 2025-05-20 13:13:01 +02:00
29f2e7dd95 bui11 2025-05-20 13:11:49 +02:00
703a67f516 Update .gitlab-ci.yml file 2025-05-20 13:11:44 +02:00
9f7f13e7cb bui11 2025-05-20 13:07:57 +02:00
9c34880ba9 bui11 2025-05-20 13:05:24 +02:00
7eb37d2184 bui11 2025-05-20 13:04:23 +02:00
69a404ccad Merge branch 'main' of gitlab.sectorq.eu:jaydee/omv_backup 2025-05-20 13:02:57 +02:00
6d4602aeaa build 2025-05-20 13:02:53 +02:00
a50bdeb9d3 Update .gitlab-ci.yml file 2025-05-20 13:02:36 +02:00
b7342433bf build 2025-05-20 12:59:09 +02:00
41c5628fbb build 2025-05-20 12:57:50 +02:00
7fadea9fa7 build 2025-05-20 12:55:49 +02:00
16b0e0e7e4 Merge branch 'main' of gitlab.sectorq.eu:jaydee/omv_backup 2025-05-20 12:55:04 +02:00
0082963725 added v3 2025-05-20 12:54:41 +02:00
004a737215 Update .gitlab-ci.yml file 2025-05-20 12:53:34 +02:00
d795a5e4cd build 2025-05-20 12:04:47 +02:00
d9c9b010a9 Update .gitlab-ci.yml file 2025-05-20 12:03:46 +02:00
a3e804c80d Update .gitlab-ci.yml file 2025-05-20 12:01:07 +02:00
dd9d7e1241 added v3 2025-05-20 11:44:56 +02:00
134e538b1f added v3 2025-05-20 09:28:18 +02:00
a2f42d41c2 added v3 2025-05-20 09:27:04 +02:00
9e51ed3707 added v3 2025-05-06 18:04:02 +02:00
f03a7362bd added v3 2025-05-06 11:36:56 +02:00
2913f8c5cd added v3 2025-05-06 04:23:46 +02:00
7bafeb227a added v3 2025-05-06 03:32:34 +02:00
2fbe457fde added v3 2025-05-06 03:26:37 +02:00
8c53b07cf5 added v3 2025-05-05 22:20:27 +02:00
420423066a build 2025-05-05 18:34:57 +02:00
02999e5619 build 2025-05-05 18:34:30 +02:00
a414a04bd0 build 2025-05-05 18:32:42 +02:00
0365c5e4e2 build 2025-05-05 18:29:43 +02:00
1ee67ce3e9 build 2025-05-05 18:27:55 +02:00
5 changed files with 649 additions and 335 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
.gitlab-ci.yml

View File

@ -1,9 +1,30 @@
stages:
# This file is a template, and might need editing before it works on your project.
# This is a sample GitLab CI/CD configuration file that should run without any modifications.
# It demonstrates a basic 3 stage CI/CD pipeline. Instead of real tests or scripts,
# it uses echo commands to simulate the pipeline execution.
#
# A pipeline is composed of independent jobs that run scripts, grouped into stages.
# Stages run in sequential order, but jobs within stages run in parallel.
#
# For more information, see: https://docs.gitlab.com/ee/ci/yaml/#stages
#
# You can copy and paste this template into a new `.gitlab-ci.yml` file.
# You should not add this template to an existing `.gitlab-ci.yml` file by using the `include:` keyword.
#
# To contribute improvements to CI/CD templates, please follow the Development guide at:
# https://docs.gitlab.com/development/cicd/templates/
# This specific template is located at:
# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Getting-Started.gitlab-ci.yml
stages: # List of stages for jobs, and their order of execution
- build
build_job:
build-job: # This job runs in the build stage, which runs first.
stage: build
script:
- echo "Running build pipeline"
- column=":"
- echo "${flow_id}"
- curl -X POST https://kestra.sectorq.eu/api/v1/executions/webhook/jaydee/ansible-all/${flow_id} -d '{"tag":["setup","omv_backup"],"target":["servers"]}' -H "Content-Type${column} application/json"
rules:
- if: '$CI_COMMIT_MESSAGE =~ /build/'

View File

@ -1,18 +1,52 @@
import subprocess
import requests
import datetime
import os
import shutil
now = datetime.datetime.now()
PASSWORD = "l4c1j4yd33Du5lo"
DATETIME = now.strftime("%Y%m%d%H%M%S")
os.chdir("/share/docker_data/__backups/")
print("Backup gitlab")
cmnd = 'docker exec -t gitlab gitlab-backup create SKIP=artifacts,repositories,registry,uploads,builds,pages,lfs,packages,terraform_state'
status, output = subprocess.getstatusoutput(cmnd)
allfiles = os.listdir("/share/docker_data/gitlab/data/backups/")
for f in allfiles:
shutil.move(f"/share/docker_data/gitlab/data/backups/{f}", "/share/docker_data/__backups/")
print("Backup nextcloud")
cmnd = 'docker exec --user www-data nextcloud-app-1 php occ maintenance:mode --on'
status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"docker exec nextcloud-db-1 sh -c 'exec mysqldump --single-transaction -h localhost -u nextcloud -pl4c1j4yd33Du5lo nextcloud' > /share/docker_data/__backups/nextcloudDB_{DATETIME}.sql"
cmnd = f"docker exec nextcloud-db-1 sh -c 'exec mysqldump --single-transaction -h localhost -u nextcloud -p{PASSWORD} nextcloud' > /share/docker_data/__backups/nextcloudDB_{DATETIME}.sql"
status, output = subprocess.getstatusoutput(cmnd)
cmnd = "docker exec --user www-data nextcloud-app-1 php occ maintenance:mode --off"
status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"zip -P {PASSWORD} nextcloudDB_{DATETIME}.sql.zip nextcloudDB_{DATETIME}.sql"
status, output = subprocess.getstatusoutput(cmnd)
os.remove(f"nextcloudDB_{DATETIME}.sql")
print("Backup Bookstack")
cmnd = f"docker exec bookstack-db-1 sh -c 'exec mysqldump --single-transaction -h localhost -u bookstack -p{PASSWORD} bookstackapp' > /share/docker_data/__backups/bookstack_{DATETIME}.sql"
status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"zip -P {PASSWORD} bookstack_{DATETIME}.sql.zip bookstack_{DATETIME}.sql"
status, output = subprocess.getstatusoutput(cmnd)
os.remove(f"bookstack_{DATETIME}.sql")
print("Backup Kestra")
cmnd = f"docker exec kestra-postgres-1 sh -c 'pg_dump -h localhost -p 5432 -U kestra -d kestra' > /share/docker_data/__backups/kestra_{DATETIME}.sql"
status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"zip -P {PASSWORD} kestra_{DATETIME}.sql.zip kestra_{DATETIME}.sql"
status, output = subprocess.getstatusoutput(cmnd)
os.remove(f"kestra_{DATETIME}.sql")
print("Backup Authentik")
cmnd = f"docker exec authentik-postgresql-1 sh -c 'pg_dump -h localhost -p 5432 -U authentik -d authentik' > /share/docker_data/__backups/authentik_{DATETIME}.sql"
status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"zip -P {PASSWORD} authentik_{DATETIME}.sql.zip authentik_{DATETIME}.sql"
status, output = subprocess.getstatusoutput(cmnd)
os.remove(f"authentik_{DATETIME}.sql")
print("Backup portainer")
headers = {
@ -21,7 +55,7 @@ headers = {
}
json_data = {
'password': 'l4c1j4yd33Du5lo',
'password': PASSWORD,
}
response = requests.post('https://portainer.sectorq.eu/api/backup', headers=headers, json=json_data, verify=True)
@ -33,3 +67,7 @@ response = requests.post('https://portainer.sectorq.eu/api/backup', headers=head
with open(f'/share/docker_data/__backups/portainer_snapshot_{DATETIME}.tar.gz', 'wb') as f:
f.write(response.content)
os.chdir("/share/docker_data/__backups/")
print("Remove Old Files")
cmnd = f" find ./ -maxdepth 1 -type f -mtime +15 -exec rm -f {{}} \\;"
status, output = subprocess.getstatusoutput(cmnd)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/myapps/venv/bin/python3
import datetime
import logging
from paho.mqtt import client as mqtt_client
@ -7,19 +7,40 @@ import json
import time
import socket
import subprocess
from subprocess import Popen, PIPE, CalledProcessError
import sys
import os
import re
import platform
import requests
import fnmatch
import yaml
import paramiko
import numpy as np
file_path = os.path.realpath(__file__)
dir_path = os.path.dirname(file_path)
VERSION="1.0.7"
# print(file_path)
# print(dir_path)
os.chdir(dir_path)
from wakeonlan import send_magic_packet
pid = os.getpid()
def is_port_open(host, port, timeout=None):
    """Return True if a TCP connection to (host, port) can be established.

    Args:
        host: hostname or IP address to probe.
        port: TCP port number.
        timeout: optional connect timeout in seconds. The default of
            ``None`` keeps the original fully-blocking behaviour, so
            existing callers are unaffected; pass a number to avoid
            hanging forever on a filtered/unreachable host.

    Returns:
        bool: True if the handshake succeeded, False on any socket error
        (refused, unreachable, timed out, DNS failure).
    """
    try:
        # create_connection resolves the name and performs the handshake;
        # the socket is closed immediately — we only care about reachability.
        sock = socket.create_connection((host, port), timeout=timeout)
        sock.close()
        return True
    except socket.error:
        return False
servers = ["rpi5.home.lan","nas.home.lan","rack.home.lan","m-server.home.lan"]
host = platform.node().lower()
#input(host)
cmnd = "ps -ef|grep omv_backups.py|grep -v grep |grep -v {}|wc -l".format(pid)
status, output = subprocess.getstatusoutput(cmnd)
if int(output) > 0:
print("Running already!")
sys.exit()
def is_port_open(host, port):
try:
sock = socket.create_connection((host, port))
@ -29,59 +50,91 @@ def is_port_open(host, port):
return False
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# doesn't even have to be reachable
conn = False
while not conn:
try:
s.connect(('192.168.77.1', 1))
IP = s.getsockname()[0]
print(IP)
print(output)
if int(output) > 0:
print("Running already!")
sys.exit()
conn = True
except:
time.sleep(5)
broker = 'mqtt.home.lan'
port = 1883
topic_sum = "sectorq/omv/backups"
topic_sum = "sectorq/amd/backups"
mqtt_username = 'jaydee'
mqtt_password = 'jaydee1'
print("1")
try:
opts, args = getopt.getopt(sys.argv[1:], "amftdr:b", ["command=", "help", "output="])
opts, args = getopt.getopt(sys.argv[1:], "hTamftDr:bd:sSOl:", ["command=", "help", "output="])
except getopt.GetoptError as err:
#usage()
sys.exit(2)
output = None
# QJ : getopts
_MODE = "manual"
_FIRST = _TEST = _RESTORE = _BACKUP = False
_FIRST = _TEST = _RESTORE = _BACKUP = _SYNC = _START = _STOP = _SSH_TEST = False
_EXECUTE = True
_DATE = "pick"
_LOG_LEVEL = ""
for o, a in opts:
if o == "-a":
_MODE = "auto"
elif o in ("-m", "--manual"):
_MODE = "manual"
elif o in ("-l", "--level"):
_LOG_LEVEL = a.upper()
elif o in ("-f", "--first"):
_FIRST = True
elif o in ("-d", "--date"):
_DATE = a
elif o in ("-t", "--test"):
_TEST = True
elif o in ("-s", "--sync"):
_SYNC = True
elif o in ("-S", "--start"):
_START = True
elif o in ("-O", "--stop"):
_STOP = True
elif o in ("-r", "--restore"):
_RESTORE = True
_APP = a
print("RESTORE")
elif o in ("-b", "--backup"):
_BACKUP = True
elif o in ("-d", "--dry"):
elif o in ("-D", "--dry"):
_EXECUTE = False
elif o in ("-T", "--dry"):
_SSH_TEST = True
elif o in ("-h", "--help"):
print(VERSION)
sys.exit()
LOG_FILE = "omv_backup.log"
if _LOG_LEVEL == "DEBUG":
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.debug('using debug loging')
elif _LOG_LEVEL == "ERROR":
logging.basicConfig(filename=LOG_FILE, level=logging.ERROR, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.info('using error loging')
elif _LOG_LEVEL == "SCAN":
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.info('using error loging')
else:
logging.basicConfig(filename=LOG_FILE, level=logging.INFO, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.info("script started")
print("2")
logger = logging.getLogger(__name__)
client_id = "dasdasdasd333"
try:
client = mqtt_client.Client(mqtt_client.CallbackAPIVersion.VERSION1, client_id)
except:
client = mqtt_client.Client()
client.username_pw_set(mqtt_username, mqtt_password)
client.connect(broker,1883,60)
now = datetime.datetime.now()
STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
msg = {"mode":_MODE, "status":"started","bak_name":"complete","start_time":STARTTIME,"end_time":"in progress","progress":0}
client.publish(topic_sum, json.dumps(msg));
client.disconnect()
backups = {
"nas": {
"login": "admin@nas.home.lan",
"jobs": {
"github":
{"source":"/share/Data/__GITHUB",
"exclude":"",
@ -91,9 +144,12 @@ backups = {
"source":"/share/Photo/Years",
"exclude":"",
"active":True
},
}
}
},
"m-server":{
"login": "root@m-server.home.lan",
"jobs": {
"docker_data":{
"source":"/share/docker_data/",
"exclude":"",
@ -104,6 +160,7 @@ backups = {
"exclude":"",
"active":True
}
}
},
"rpi5.home.lan":{
"docker_data":{
@ -118,162 +175,236 @@ backups = {
}
}
}
BACKUP_FS = "/srv/dev-disk-by-uuid-2f843500-95b6-43b0-bea1-9b67032989b8"
BACKUP_HOST = "omv.home.lan"
BACKUP_FS = "/media/backup/"
BACKUP_HOST = "amd.home.lan"
#BACKUP_HOST = "morefine.home.lan"
if not host in backups and _BACKUP:
print(f"No backup jobs for {host}")
sys.exit()
print("Test connection")
print("3")
logging.info("Test connection")
hm = socket.gethostbyaddr(BACKUP_HOST)
hostdown = True
n=0
logging.info(_RESTORE)
def send_mqtt_message(msg):
    """Best-effort status notification via a Home Assistant webhook.

    NOTE(review): despite the name, this POSTs to a fixed HA webhook URL
    and the *msg* argument is never sent — confirm whether *msg* should be
    included as a JSON payload.

    Failures are deliberately swallowed (after logging) so that a
    notification outage can never abort a backup/restore run.

    Args:
        msg: status payload (currently unused — see note above).
    """
    try:
        url = "http://m-server.home.lan:8123/api/webhook/-0eWYFhSTzdusAO8jwQS9t1AT?mode=on"
        x = requests.post(url)
        print(x.text)
    except Exception:
        # Narrowed from a bare ``except:`` so Ctrl-C / SystemExit still
        # propagate; log instead of silently passing.
        logging.exception("send_mqtt_message: webhook notification failed")
while hostdown:
#HOST_UP = os.system(f"ping -c 1 -w 2 omv.home.lan") == 0
cmnd = f"ping -c 1 -w 2 {BACKUP_HOST}"
status, output = subprocess.getstatusoutput(cmnd)
# print(status)
# print(output)
if status != 0:
send_magic_packet('88:c9:b3:b5:23:d8')
print(f"Backup host down, waiting - {n}\r", end="")
time.sleep(5)
n += 1
else:
print("Backup host up " )
hostdown = False
port = 22 # Replace with the port you want to test
n=0
while not is_port_open(BACKUP_HOST, port):
print(f"Port {port} on {BACKUP_HOST} is closed. {n}\r", end="")
time.sleep(5)
n += 1
print(f"Port {port} on {BACKUP_HOST} is open.")
print("Starting")
print(_RESTORE)
if _RESTORE:
print("Starting Restore")
if _APP == "all":
_APP = ["nginx","ha","gitlab","mailu","bitwarden","esphome","grafana","ingluxdb","kestra","matter-server","mosquitto","octoprint","octoprint2","pihole","unify_block","webhub"]
else:
_APP = _APP.split(",")
for app in _APP:
topic = "sectorq/omv/restore/{}".format(app)
client.connect(broker,1883,60)
msg = {"status":"inactive","bak_name":app,"start_time":"inactive","end_time":"inactive","progress":0}
client.publish(topic, json.dumps(msg))
client.disconnect()
except ValueError as e:
logging.error("Failed to send")
print("Failed to send")
print(e)
if _SYNC:
containers = ["HomeAssistant","webhub-web-1","heimdall","pihole","mosquitto-mosquitto-1","mailu3-redis-1","mailu3-webmail-1","mailu3-resolver-1","mailu3-antispam-1","mailu3-webdav-1","mailu3-smtp-1","mailu3-oletools-1","mailu3-front-1","mailu3-fetchmail-1","mailu3-imap-1","matter-server","piper-en","openwakeword","whisper-en","auth-worker-1","auth-server-1","auth-authentik_ldap-1","auth-redis-1","auth-postgresql-1","nginx-app-1"]
cmnd = f"curl -H 'Authorization: Bearer l4c1j4yd33Du5lo' 192.168.77.238:8094/v1/update"
logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
if _START:
for c in containers:
cmnd = f"docker start {c}"
print(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
if _STOP:
cmnd = "docker ps"
status, running_containers = subprocess.getstatusoutput(cmnd)
logging.info(running_containers)
for c in running_containers.splitlines():
print(c.split()[-1])
if c.split()[-1] == "watchtower-watchtower-1":
continue
cmnd = f"docker stop {c.split()[-1]}"
status, running_containers = subprocess.getstatusoutput(cmnd)
if _RESTORE:
logging.info("Starting Restore")
print("Starting Restore")
now = datetime.datetime.now()
STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
if _APP == "all":
_DATE = "latest"
if host == "rpi5.home.lan" or host == "rpi5":
_APP = ["__backups", "nginx","ha","gitea","gitlab","mailu","bitwarden","esphome","grafana","ingluxdb","kestra","matter-server","mosquitto","octoprint","octoprint2","pihole","unify_block","webhub","homepage","watchtower"]
else:
cmnd = "ssh root@amd.home.lan 'ls /mnt/raid/backup/m-server/docker_data/latest'"
status, output = subprocess.getstatusoutput(cmnd)
_APP = output.splitlines()
logging.info(_APP)
#input("????")
else:
_APP = _APP.split(",")
PROGRESS = 0
topic = "sectorq/amd/restore"
step = 100 / len(_APP)
for app in _APP:
msg = {"mode":_MODE, "status":"restore","bak_name":"Restore","host":host,"cur_job":app,"start_time":STARTTIME,"end_time":"","progress":str(round(np.ceil(PROGRESS))) + "%","finished":1,"used_space":1}
logging.info(msg)
send_mqtt_message(msg)
PROGRESS = PROGRESS + step
now = datetime.datetime.now()
DATETIME = now.strftime("%Y-%m-%d_%H-%M-%S")
BACKUP_HOST = "root@omv.home.lan"
BACKUP_DEVICE = "/srv/dev-disk-by-uuid-2f843500-95b6-43b0-bea1-9b67032989b8"
BACKUP_HOST = f"root@amd.home.lan"
BACKUP_DEVICE = "/mnt/raid"
BACKUP_DIR = f"/backup/{host}"
if _DATE == "pick":
cmnd = f"ssh root@amd.home.lan 'ls {BACKUP_DEVICE}/backup/m-server/docker_data'"
status, output = subprocess.getstatusoutput(cmnd)
print(output)
dates = output.splitlines()
n = 1
for i in dates:
print(f"{n} - {i}" )
n += 1
ans = input("Pick a backup to restore : ")
_DATE = dates[int(ans) - 1]
if app == "fail2ban":
print("?>?????")
NEW_BACKUP_DIR = f"/backup/m-server/fail2ban/latest/"
logging.info("?>?????")
NEW_BACKUP_DIR = f"/backup/m-server/fail2ban/{_DATE}/"
SOURCE_DIR = f"/etc/fail2ban"
else:
NEW_BACKUP_DIR = f"/backup/m-server/docker_data/latest/{app}"
NEW_BACKUP_DIR = f"/backup/m-server/docker_data/{_DATE}/{app}"
SOURCE_DIR = f"/share/docker_data/"
if _FIRST:
BACKUP_PATH="{}/initial".format(BACKUP_DIR)
else:
BACKUP_PATH="{}/{}".format(BACKUP_DIR, DATETIME)
LATEST_LINK="{}/latest".format(BACKUP_DIR)
FULL_BACKUP_LATEST = f"{NEW_BACKUP_DIR}/latest"
LATEST_LINK = f"/{host}/{app}/latest"
LATEST_LINK="{}/{}".format(BACKUP_DIR,_DATE)
FULL_BACKUP_LATEST = f"{NEW_BACKUP_DIR}/{_DATE}"
LATEST_LINK = f"/{host}/{app}/{_DATE}"
msg = {"status":"started","bak_name":app,"start_time":DATETIME,"end_time":"in progress", "progress":0}
client.connect(broker,1883,60)
client.publish(topic, json.dumps(msg))
client.disconnect()
print("Create backup dir")
print(cmnd)
logging.info("Create backup dir")
logging.info(cmnd)
#cmnd = "rsync -av --delete {}/ --link-dest {} --exclude=\".cache\" {}".format(SOURCE_DIR, LATEST_LINK, BACKUP_PATH)
if app == "heimdall":
print("Stopping docker")
logging.info("Stopping docker")
cmnd = "docker stop heimdall"
status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"rsync -avz --delete rsync://{BACKUP_HOST}{NEW_BACKUP_DIR} {SOURCE_DIR}"
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
print(cmnd)
print("Sync files")
logging.info(cmnd)
logging.info("Sync files")
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd)
entries = ["Home Assistant","Nginx Proxy Manager","Portainer","Roundcube","Authentik","Kestra"]
for e in entries:
cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"SELECT url FROM items WHERE title = '{e}'\""
print(cmnd)
logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
regex = re.compile(r'[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}')
contents = re.sub(regex, IP , output)
cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"UPDATE items SET url = '{contents}' WHERE title = '{e}'\""
print(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
cmnd = "docker start heimdall"
logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
# cmnd = "docker start heimdall"
# status, output = subprocess.getstatusoutput(cmnd)
if app == "ha":
print("Stopping docker")
logging.info("Stopping docker")
cmnd = "docker stop heimdall"
status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"rsync -avz --delete rsync://{BACKUP_HOST}{NEW_BACKUP_DIR} {SOURCE_DIR}"
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
print(cmnd)
print("Sync files")
logging.info(cmnd)
logging.info("Sync files")
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd)
print("Start docker")
cmnd = "docker start heimdall"
status, output = subprocess.getstatusoutput(cmnd)
if app == "fail2ban":
print("Stopping docker")
cmnd = f"rsync -avz --delete rsync://{BACKUP_HOST}{NEW_BACKUP_DIR} {SOURCE_DIR}"
logging.info("Start docker")
# cmnd = "docker start heimdall"
# status, output = subprocess.getstatusoutput(cmnd)
elif app == "fail2ban":
logging.info("Stopping docker")
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
print(cmnd)
print("Sync files")
logging.info(cmnd)
logging.info("Sync files")
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd)
print("Start docker")
cmnd = "docker start heimdall"
logging.info("Start docker")
# cmnd = "docker start heimdall"
# status, output = subprocess.getstatusoutput(cmnd)
elif app == "homepage":
logging.info("Stopping docker")
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
logging.info(cmnd)
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd)
elif app == "nginx":
print("Stopping docker")
file = "/share/docker_data/homepage/config/widgets.yaml"
with open(file, 'r') as stream:
try:
loaded = yaml.load(stream, Loader=yaml.FullLoader)
except yaml.YAMLError as exc:
logging.info(exc)
# Modify the fields from the dict
#loaded['logo']['icon'] = "/images/morefine2.png"
logging.info(json.dumps(loaded, indent=2))
i = 0
for y in loaded:
logging.info(i)
logging.info(y)
if "logo" in y:
if host == "rpi5.home.lan" or host == "rpi5":
loaded[i]['logo']['icon'] = "/images/rpi5.png"
elif host == "nas.home.lan":
loaded[i]['logo']['icon'] = "/images/qnap_nas.png"
elif host == "rack.home.lan":
loaded[i]['logo']['icon'] = "/images/rack.png"
else:
loaded[i]['logo']['icon'] = "/images/morefine2.png"
i+=1
# Save it again
logging.info(f"writing to file {file}")
with open(file, 'w') as stream:
try:
yaml.dump(loaded, stream, default_flow_style=False)
except yaml.YAMLError as exc:
print("failed")
print(exc)
logging.info("Start docker")
# cmnd = "docker start heimdall"
# status, output = subprocess.getstatusoutput(cmnd)
elif app == "nginx1":
logging.info("Stopping docker")
cmnd = "docker stop nginx-app-1"
status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"rsync -avz --delete rsync://{BACKUP_HOST}{NEW_BACKUP_DIR} {SOURCE_DIR}"
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
print(cmnd)
print("Sync files")
logging.info(cmnd)
logging.info("Sync files")
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
@ -281,151 +412,193 @@ if _RESTORE:
domains = ["sectorq.eu","gitlab.sectorq.eu","ha.sectorq.eu","mail.sectorq.eu","pw.sectorq.eu","semaphore.sectorq.eu","kestra.sectorq.eu","auth.sectorq.eu"]
for d in domains:
cmnd = f'sqlite3 /share/docker_data/nginx/data/database.sqlite "UPDATE proxy_host SET forward_host = \'{IP}\' WHERE domain_names = \'[\\"{d}\\"]\'"'
print(cmnd)
logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
cmnd = 'egrep -l "# kestra.sectorq.eu|# auth.sectorq.eu|# ha.sectorq.eu|# pw.sectorq.eu|# semaphore.sectorq.eu|# sectorq.eu|# gitlab.sectorq.eu|# ha.sectorq.eu" /share/docker_data/nginx/data/nginx/proxy_host/*'
cmnd = 'egrep -l "# bazarr.sectorq.eu|# gitea.sectorq.eu|# jf.sectorq.eu|# kestra.sectorq.eu|# auth.sectorq.eu|# ha.sectorq.eu|# pw.sectorq.eu|# semaphore.sectorq.eu|# sectorq.eu|# gitlab.sectorq.eu|# ha.sectorq.eu" /share/docker_data/nginx/data/nginx/proxy_host/*'
status, output = subprocess.getstatusoutput(cmnd)
print(output.splitlines())
logging.info(output.splitlines())
for file in output.splitlines():
print(file)
logging.info(file)
f = open(file)
contents = f.read()
f.close()
regex = re.compile(r'\n\s+set \$server\s+\"\w+.\w+.\w+.\w+\";')
contents = re.sub(regex, f'\n set $server \"{IP}\";', contents)
#print(contents)
print(regex)
logging.info(regex)
f = open(file, "w")
contents = f.write(contents)
f.close()
status, output = subprocess.getstatusoutput(cmnd)
print("Starting docker")
cmnd = "docker start nginx-app-1"
status, output = subprocess.getstatusoutput(cmnd)
logging.info("Starting docker")
# cmnd = "docker start nginx-app-1"
# status, output = subprocess.getstatusoutput(cmnd)
else:
cmnd = f"rsync -avz --delete rsync://{BACKUP_HOST}{NEW_BACKUP_DIR} {SOURCE_DIR}"
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
print(cmnd)
print("Sync files")
logging.info(cmnd)
logging.info("Sync files")
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd)
now = datetime.datetime.now()
ENDTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
msg = {"status":"finished","bak_name":app,"start_time":DATETIME,"end_time":ENDTIME,"progress":0}
client.connect(broker,1883,10)
client.publish(topic, json.dumps(msg))
client.disconnect()
now = datetime.datetime.now()
ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S")
print("Sending finished status")
msg = {"mode":_MODE,"status":"finished","bak_name":"complete","start_time":STARTTIME,"end_time":ENDJOB,"progress":0,"used_space":"?"}
print(msg)
client.connect(broker,1883,10)
client.publish(topic_sum, json.dumps(msg))
client.disconnect()
logging.info("Sending finished status")
msg = {"mode":_MODE, "status":"restore","bak_name":"Restore","host":host,"cur_job":app,"start_time":STARTTIME,"end_time":"","progress":100,"finished":ENDJOB,"used_space":1}
logging.info(msg)
send_mqtt_message(msg)
if _MODE == "auto":
cmnd = "ssh root@omv.home.lan 'systemctl suspend &'"
cmnd = "ssh root@amd.home.lan 'systemctl suspend &'"
status, output = subprocess.getstatusoutput(cmnd)
if _BACKUP:
last = 1
while True:
directory = '/backups/'
count = len(fnmatch.filter(os.listdir(directory), '*.*'))
print('File Count:', count)
count = len(fnmatch.filter(os.listdir(directory), '*'))
if last != count:
logging.info(f'File Count: {count}')
last = count
if count == 0:
time.sleep(10)
continue
else:
finished = []
now = datetime.datetime.now()
STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
topic = "sectorq/amd/backups"
msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":"","cur_job":"","start_time":STARTTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
send_mqtt_message(msg)
# iterate over files in
# that directory
for filename in os.scandir(directory):
if filename.is_file():
logging.info(filename.path)
logging.info(filename.name)
if filename.name == "restore":
break
host = filename.name
logging.info("Backup")
for b in backups[host]["jobs"]:
print(filename.path)
print("Backup")
for b in backups[host]:
topic = "sectorq/omv/backups/{}".format(b.lower())
if not backups[host][b]["active"]:
print("Backup {} is not active!".format(b))
client.connect(broker,1883,60)
if not backups[host]["jobs"][b]["active"]:
logging.info("Backup {} is not active!".format(b))
msg = {"status":"inactive","bak_name":b,"start_time":"inactive","end_time":"inactive","progress":0}
client.publish(topic, json.dumps(msg))
client.disconnect()
send_mqtt_message(msg)
continue
SOURCE_DIR = backups[host][b]["source"]
SOURCE_DIR = backups[host]["jobs"][b]["source"]
now = datetime.datetime.now()
BACKUP_HOST = "root@omv.home.lan"
BACKUP_DEVICE = "/srv/dev-disk-by-uuid-2f843500-95b6-43b0-bea1-9b67032989b8"
BACKUP_DIR = f"/backup/{host}/{b}"
NEW_BACKUP_DIR = f"{BACKUP_DEVICE}/backup/{host}/{b}"
BACKUP_HOST = backups[host]["login"]
BACKUP_DEVICE = "/mnt/raid"
BACKUP_DIR = f"{BACKUP_HOST}:{SOURCE_DIR}"
BACKUP_ROOT = f"{BACKUP_DEVICE}/backup/{host}/{b}"
DATETIME = now.strftime("%Y-%m-%d_%H-%M-%S")
if _FIRST:
BACKUP_PATH="{}/initial".format(BACKUP_DIR)
NEW_BACKUP_DIR = f"{BACKUP_ROOT}/initial"
else:
BACKUP_PATH="{}/{}".format(BACKUP_DIR, DATETIME)
LATEST_LINK="{}/latest".format(BACKUP_DIR)
FULL_BACKUP_LATEST = f"{NEW_BACKUP_DIR}/latest"
LATEST_LINK = f"/{host}/{b}/latest"
NEW_BACKUP_DIR = f"{BACKUP_ROOT}/{DATETIME}_running"
FULL_BACKUP_LATEST = f"{BACKUP_ROOT}/latest"
msg = {"status":"started","bak_name":b,"start_time":DATETIME,"end_time":"in progress", "progress":0}
# msg = {"status":"started","bak_name":b,"start_time":DATETIME,"end_time":"in progress", "progress":0}
msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"start_time":STARTTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
client.connect(broker,1883,60)
client.publish(topic, json.dumps(msg))
client.publish(topic, json.dumps(msg),qos=0, retain=True)
client.disconnect()
# --- Create the target dir, rsync the job, track progress over MQTT, swap the "latest" link ---
# NOTE(review): duplicate from merged diff — the local (non-ssh) mkdir below is effective.
cmnd = "ssh root@omv.home.lan 'mkdir -p " + NEW_BACKUP_DIR + "'"
cmnd = "mkdir -p " + NEW_BACKUP_DIR
logging.info(cmnd)
if _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd)
print("Create backup dir")
print(cmnd)
#cmnd = "rsync -av --delete {}/ --link-dest {} --exclude=\".cache\" {}".format(SOURCE_DIR, LATEST_LINK, BACKUP_PATH)
# NOTE(review): this push-style rsync cmnd appears superseded by the list-form `cmd`
# built further below (which is what actually gets Popen'd); diff residue again.
if _FIRST:
cmnd = f"rsync -avz --delete {SOURCE_DIR} --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" rsync://{BACKUP_HOST}{BACKUP_PATH}"
else:
cmnd = f"rsync -avz --delete {SOURCE_DIR} --link-dest {LATEST_LINK} --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" rsync://{BACKUP_HOST}{BACKUP_PATH}"
ans = "y"
print(cmnd)
print("Sync files")
# In test mode, require interactive confirmation before syncing.
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
# rsync --info=progress2 -avz --delete /share/docker_data/ --link-dest /m-server/docker_data/latest --exclude="gitlab/data/" --exclude="esphome/config/.esphome" --exclude="gitlab/logs/prometheus" --exclude=".cache" --exclude=".git" --exclude="var_lib_motioneye" /m-server/m-server/docker_data/newone1
# input("????")
# NOTE(review): `output`/`status` here come from the mkdir getstatusoutput above and
# are only defined when _EXECUTE was true there — consistent inside this guard, but fragile.
logging.info(output)
logging.info(status)
logging.info("Create backup dir")
# List top-level app dirs on the source host; used to estimate per-app progress.
cmnd = f"ssh {BACKUP_HOST} 'ls {SOURCE_DIR}'"
logger.debug(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
logger.debug(output)
apps = output.splitlines()
c = len(apps)
print(apps)
print(len(apps))
# NOTE(review): this ssh rm cmnd is overwritten before use (see local rm further down); diff residue.
cmnd = f"ssh root@omv.home.lan 'rm -rf {FULL_BACKUP_LATEST}'"
# NOTE(review): raises ZeroDivisionError when the ls returned no entries (c == 0) — TODO confirm/guard.
step = round(100 / c,1)
progress = 0
#cmd = f"rsync -avz --delete {BACKUP_DIR} --link-dest {FULL_BACKUP_LATEST}/ --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" {NEW_BACKUP_DIR}"
#cmd = [ 'rsync', '-avz','--info=progress2', BACKUP_DIR , NEW_BACKUP_DIR]
#print(cmnd)
print("Removing latest link")
# Pull-style rsync, hard-linking unchanged files against the previous "latest" snapshot.
# NOTE(review): with a list argv (no shell), the embedded double quotes in the
# --exclude patterns are passed literally to rsync and will not match as intended — TODO confirm.
cmd = ['rsync', '-avz', '--delete', BACKUP_DIR, '--link-dest', FULL_BACKUP_LATEST, '--exclude="jellyfin/cache/transcodes"', '--exclude=".@__thumb/"', '--exclude="gitlab/logs/prometheus"', '--exclude="home-assistant.log"', '--exclude="gitlab/logs/*"', '--exclude="esphome/config/.esphome"', '--exclude=".cache"', '--exclude=".git"', '--exclude="var_lib_motioneye"', NEW_BACKUP_DIR]
logging.info(" ".join(cmd))
process = subprocess.Popen(cmd,
stdout=subprocess.PIPE)
# Parse rsync's file list as it streams; the first path component tells us which
# app directory is currently being transferred, which drives the progress estimate.
while process.poll() is None:
line = process.stdout.readline().decode("utf-8").split("/")
print(line[0])
if line[0] in apps:
logging.info(f"Working on app {line[0]}")
# Skip over apps that produced no output lines, crediting their progress step.
while True:
if line[0] != apps[0]:
del apps[0]
progress = progress + step
else:
break
apps.remove(line[0])
#print(len(apps))
topic = "sectorq/amd/backups"
msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"sub":line[0],"start_time":STARTTIME,"end_time":"in progress","progress":str(round(progress)) + "%","finished":",".join(finished)}
send_mqtt_message(msg)
progress = progress + step
# input(apps)
# for a in apps:
# logging.info(f"App {a}")
# topic = "sectorq/amd/backups"
# msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"sub":a,"start_time":STARTTIME,"end_time":"in progress","progress":round(progress),"finished":",".join(finished)}
# send_mqtt_message(msg)
# logger.debug(cmnd)
# if _FIRST:
# cmnd = f"rsync -avz --delete {SOURCE_DIR} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" rsync://{BACKUP_HOST}{BACKUP_PATH}"
# else:
# cmnd = f"rsync -avz --delete {BACKUP_DIR}{a} --link-dest {FULL_BACKUP_LATEST}/{a} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" {NEW_BACKUP_DIR}"
# ans = "y"
# logging.info(cmnd)
# logging.info("Sync files1")
# #input("??????")
# if _TEST:
# ans = input("continue?") or "n"
# if ans == "y" and _EXECUTE:
# status, output = subprocess.getstatusoutput(cmnd)
# #proc = subprocess.Popen(cmnd,stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.PIPE,cwd = "/myapps/",shell=True)
# progress = progress + step
# topic = "sectorq/amd/backups"
# msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"sub":a,"start_time":STARTTIME,"end_time":"in progress","progress":round(progress),"finished":",".join(finished)}
# send_mqtt_message(msg)
# Drop the old "latest" symlink, then recreate it pointing at the just-finished snapshot.
cmnd = f"rm -rf {FULL_BACKUP_LATEST}"
logging.info(cmnd)
logging.info("Removing latest link")
# input("????")
if _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd)
if _FIRST:
# NOTE(review): duplicate from merged diff — the local (non-ssh) cmnd below is effective.
cmnd = f"ssh root@omv.home.lan 'cd {NEW_BACKUP_DIR}; ln -s initial latest'"
cmnd = f"cd {BACKUP_ROOT}; ln -s initial latest"
else:
cmnd = f"ssh root@omv.home.lan 'cd {NEW_BACKUP_DIR}; ln -s {DATETIME} latest'"
print("Creating new latest link")
# Rename the "_running" dir to its final timestamp, then link "latest" to it.
cmnd = f"cd {BACKUP_ROOT}; mv {DATETIME}_running {DATETIME};ln -s {DATETIME} latest"
logging.info("Creating new latest link")
#print(cmnd)
# input("????")
# NOTE(review): the body of this guard was cut off by the scrape (diff hunk boundary follows);
# presumably it runs `subprocess.getstatusoutput(cmnd)` — TODO confirm against the real file.
if _EXECUTE:
# @@ -433,66 +606,147 @@ if _BACKUP:  <- stray diff hunk header left over from the scraped
# compare view; not program code. Original context: inside the "if _BACKUP:" branch.
#Remove old
# --- Prune old snapshots, publish per-job + summary status, then (auto mode) suspend the host ---
print("Removing old dirs")
logging.info("Removing old dirs")
# input("????")
#cmnd = "find {} -maxdepth 1 -type d -mtime +30 -exec rm -rf {{}} \;".format(BACKUP_DIR)
# NOTE(review): duplicate from merged diff — the BACKUP_ROOT variant below is effective.
# Also: there is no ';' or '&&' between `cd {dir}` and `find`, so as written this runs
# `cd` with extra arguments and never prunes — TODO confirm and fix in the real file.
cmnd = f"cd {NEW_BACKUP_DIR} find ./ -maxdepth 1 -type d -mmin +30 -exec rm -rf {{}} \\;"
cmnd = f"cd {BACKUP_ROOT} find ./ -maxdepth 1 -type d -mmin +30 -exec rm -rf {{}} \\;"
#print(cmnd)
# input("????")
if _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd)
now = datetime.datetime.now()
ENDTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
# Per-job "finished" message, published directly via the client...
msg = {"status":"finished","bak_name":b,"start_time":DATETIME,"end_time":ENDTIME,"progress":0}
client.connect(broker,1883,10)
client.publish(topic, json.dumps(msg))
client.disconnect()
#msg = {"status":"finished","bak_name":b,"start_time":DATETIME,"end_time":ENDTIME,"progress":0}
finished.append(b)
# ...and the aggregated form via the helper (diff residue keeps both paths alive).
msg = {"mode":_MODE, "status":"finished","bak_name":"complete","host":host,"cur_job":b,"start_time":ENDTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
send_mqtt_message(msg)
print("Getting size of FS")
#cmnd = "du -h --max-depth=0 {}".format(BACKUP_FS)
# NOTE(review): duplicate from merged diff — the local /mnt/raid df below is effective.
cmnd = "ssh root@omv.home.lan 'df -h /srv/dev-disk-by-uuid-2f843500-95b6-43b0-bea1-9b67032989b8|awk '\\''{ print $3 }'\\''|tail -1'"
print(cmnd)
logging.info("Getting size of FS")
cmnd = "df -h /mnt/raid|awk '{ print $3 }'|tail -1"
logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
used_space = (output.split())[0]
now = datetime.datetime.now()
ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S")
print("Size : {}".format(used_space))
print("Sending finished status")
# Summary message to the summary topic with total used space.
msg = {"mode":_MODE,"status":"finished","bak_name":"complete","start_time":STARTTIME,"end_time":ENDJOB,"progress":0,"used_space":used_space}
print(msg)
client.connect(broker,1883,10)
client.publish(topic_sum, json.dumps(msg))
client.disconnect()
logging.info("Size : {}".format(used_space))
logging.info("Sending finished status")
#msg = {"mode":_MODE,"status":"finished","bak_name":"complete","start_time":STARTTIME,"end_time":ENDJOB,"progress":0,"used_space":used_space}
msg = {"mode":_MODE, "status":"finished","bak_name":"complete","host":host,"cur_job":b,"start_time":STARTTIME,"end_time":ENDTIME,"progress":0,"finished":",".join(finished),"used_space":used_space}
logging.info(msg)
# Auto mode: ask the backup host to suspend, then ping until it stops answering.
if _MODE == "auto":
hostup = True
cmnd = "ssh root@omv.home.lan 'systemctl suspend &'"
status, output = subprocess.getstatusoutput(cmnd)
while hostup:
#HOST_UP = os.system(f"ping -c 1 -w 2 omv.home.lan") == 0
cmnd = f"ping -c 1 -w 2 {BACKUP_HOST}"
status, output = subprocess.getstatusoutput(cmnd)
# print(status)
# print(output)
# NOTE(review): these two lines look spliced into the ping loop by the diff —
# `filename` is not defined anywhere in this chunk; TODO confirm against the real file.
send_mqtt_message(msg)
os.remove(filename.path)
if status == 0:
print(f"Backup host up, waiting - {n}\r", end="")
time.sleep(5)
n += 1
# --- Fan out a "restore" run to every other server over SSH ---
topic = "sectorq/amd/restore"
for s in servers:
#if s != "rack.home.lan":
# Skip the machine this script runs the backups from.
if s == "m-server.home.lan":
continue
elif s == "nas.home.lan":
# QNAP NAS: run the restore entry point from its venv as admin.
user = "admin"
cmnd = "/share/Data/__GITLAB/omv_backup/venv/bin/python3 /share/Data/__GITLAB/omv_backup/omv_backup.py -r all"
else:
# NOTE(review): the two lines below ("Backup host down" / hostup=False) belong to the
# ping-wait loop above and were spliced into this else-branch by the diff — TODO confirm.
print("Backup host down " )
hostup = False
user = "jd"
cmnd = "sudo /myapps/omv_backup.py -r all"
# Placeholder-valued restore status message (literal 1s / "aaa") published per server.
msg = {"mode":_MODE, "status":"restore","bak_name":"s","host":s,"cur_job":"aaa","start_time":1,"end_time":1,"progress":0,"finished":1,"used_space":1}
logging.info(msg)
send_mqtt_message(msg)
# Only attempt SSH when port 22 answers.
if is_port_open(s,22):
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
# Add SSH host key automatically if needed.
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# Connect to router using username/password authentication.
logger.info(f"Sync {s}")
print(f"Sync {s}")
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# NOTE(review): hard-coded private key path ties this script to one machine/user.
pkey = paramiko.RSAKey.from_private_key_file("/home/jd/.ssh/id_rsa")
ssh.connect(s,
username=user,
look_for_keys=False,
allow_agent=False,
pkey=pkey)
print(cmnd)
# Stream the remote command's stdout/stderr into the local log as it runs.
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(cmnd)
for line in iter(ssh_stdout.readline, ""):
logger.info(line)
print(line, end="")
for line in iter(ssh_stderr.readline, ""):
logger.info(line)
ssh.close()
# Best-effort: tell Home Assistant the backup window is over and clear the restore marker.
# NOTE(review): bare `except: pass` silently swallows all errors here, including the
# os.remove failure when /backups/restore does not exist — deliberate best-effort, but broad.
try:
url = "http://m-server.home.lan:8123/api/webhook/-0eWYFhSTzdusAO8jwQS9t1AT?mode=off"
x = requests.post(url)
print(x.text)
os.remove("/backups/restore")
except:
pass
# if _MODE == "auto":
# hostup = True
# cmnd = "ssh root@omv.home.lan 'systemctl suspend &'"
# status, output = subprocess.getstatusoutput(cmnd)
# while hostup:
# #HOST_UP = os.system(f"ping -c 1 -w 2 omv.home.lan") == 0
# cmnd = f"ping -c 1 -w 2 {BACKUP_HOST}"
# status, output = subprocess.getstatusoutput(cmnd)
# # print(status)
# # print(output)
# if status == 0:
# print(f"Backup host up, waiting - {n}\r", end="")
# time.sleep(5)
# n += 1
# else:
# print("Backup host down " )
# hostup = False
# try:
# url = "http://m-server.home.lan:8123/api/webhook/-0eWYFhSTzdusAO8jwQS9t1AT?mode=off"
# x = requests.post(url)
# print(x.text)
# except:
# pass
# --- SSH smoke test: run a harmless `ls -la` as root on rack.home.lan only ---
if _SSH_TEST:
user = "root"
cmnd = "ls -la"
topic = "sectorq/amd/backups"
for s in servers:
# if s == "m-server.home.lan":
# continue
# elif s == "nas.home.lan":
# user = "admin"
# cmnd = "/share/Data/__GITLAB/omv_backup/venv/bin/python3 /share/Data/__GITLAB/omv_backup/omv_backup.py -r all"
# Same placeholder-valued status message as the restore fan-out.
msg = {"mode":_MODE, "status":"restore","bak_name":"s","host":s,"cur_job":"aaa","start_time":1,"end_time":1,"progress":0,"finished":1,"used_space":1}
logging.info(msg)
send_mqtt_message(msg)
# Only rack.home.lan is exercised; every other server is skipped.
if s != "rack.home.lan":
continue
if is_port_open(s,22):
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
# Add SSH host key automatically if needed.
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# Connect to router using username/password authentication.
logger.info(f"Sync {s}")
print(f"Sync {s}")
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# NOTE(review): same hard-coded key path as the restore branch.
pkey = paramiko.RSAKey.from_private_key_file("/home/jd/.ssh/id_rsa")
ssh.connect(s,
username=user,
look_for_keys=False,
allow_agent=False,
pkey=pkey)
print(cmnd)
# Stream remote stdout/stderr into the local log.
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(cmnd)
for line in iter(ssh_stdout.readline, ""):
logger.info(line)
print(line, end="")
for line in iter(ssh_stderr.readline, ""):
logger.info(line)
ssh.close()