Mirror of https://gitlab.sectorq.eu/jaydee/omv_backup.git (synced 2025-07-01 23:58:33 +02:00)

Commit: lala

Changed file: omv_backup_v3.py (190)
@@ -14,10 +14,11 @@ import platform
import requests
import fnmatch
import yaml
import paramiko
from wakeonlan import send_magic_packet
pid = os.getpid()


servers = ["rpi5.home.lan","nas.home.lan","rack.home.lan","nas.home.lan"]
host = platform.node().lower()
#input(host)
cmnd = "ps -ef|grep omv_backups.py|grep -v grep |grep -v {}|wc -l".format(pid)
@@ -52,14 +53,14 @@ mqtt_username = 'jaydee'
mqtt_password = 'jaydee1'
print("1")
try:
opts, args = getopt.getopt(sys.argv[1:], "amftDr:bd:sSO", ["command=", "help", "output="])
opts, args = getopt.getopt(sys.argv[1:], "TamftDr:bd:sSO", ["command=", "help", "output="])
except getopt.GetoptError as err:
#usage()
sys.exit(2)
output = None
# QJ : getopts
_MODE = "manual"
_FIRST = _TEST = _RESTORE = _BACKUP = _SYNC = _START = _STOP = False
_FIRST = _TEST = _RESTORE = _BACKUP = _SYNC = _START = _STOP = _SSH_TEST = False
_EXECUTE = True
_DATE = "pick"

@@ -88,7 +89,24 @@ for o, a in opts:
_BACKUP = True
elif o in ("-D", "--dry"):
_EXECUTE = False
elif o in ("-T", "--dry"):
_SSH_TEST = True
_LOG_LEVEL = ""
LOG_FILE = "/var/log/omv_backup.log"
if _LOG_LEVEL == "DEBUG":
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.debug('using debug loging')
elif _LOG_LEVEL == "ERROR":
logging.basicConfig(filename=LOG_FILE, level=logging.ERROR, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.info('using error loging')
elif _LOG_LEVEL == "SCAN":
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.info('using error loging')
else:
logging.basicConfig(filename=LOG_FILE, level=logging.INFO, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.info("script started")

logger = logging.getLogger(__name__)
print("2")
client_id = "dasdasdasd333"
try:
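
The hunk above introduces the new -T short option (it reuses the "--dry" long name) and the _SSH_TEST flag it sets. A minimal, hedged sketch of how such a flag is consumed with the standard-library getopt module; the long-option names below are illustrative, not taken from the commit:

# Hedged sketch (not part of the commit): how the new -T flag is typically
# wired through getopt. Long-option names here are placeholders.
import getopt
import sys

_SSH_TEST = False
_EXECUTE = True

try:
    opts, args = getopt.getopt(sys.argv[1:], "TD", ["ssh-test", "dry"])
except getopt.GetoptError as err:
    print(err)
    sys.exit(2)

for o, a in opts:
    if o in ("-D", "--dry"):
        _EXECUTE = False   # dry run: build commands but do not execute them
    elif o in ("-T", "--ssh-test"):
        _SSH_TEST = True   # only test SSH connectivity to the configured servers
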
@@ -146,25 +164,23 @@ BACKUP_FS = "/media/backup/"
BACKUP_HOST = "amd.home.lan"
#BACKUP_HOST = "morefine.home.lan"

print("Test connection")
print("3")
logging.info("Test connection")
hm = socket.gethostbyaddr(BACKUP_HOST)

print("Starting")
print(_RESTORE)
logging.info(_RESTORE)
def send_mqtt_message(msg):
try:
client.connect(broker,1883,60)
client.publish(topic, json.dumps(msg))
client.disconnect()
except:
print("Failed to send")
logging.error("Failed to send")

if _SYNC:
containers = ["HomeAssistant","webhub-web-1","heimdall","pihole","mosquitto-mosquitto-1","mailu3-redis-1","mailu3-webmail-1","mailu3-resolver-1","mailu3-antispam-1","mailu3-webdav-1","mailu3-smtp-1","mailu3-oletools-1","mailu3-front-1","mailu3-fetchmail-1","mailu3-imap-1","matter-server","piper-en","openwakeword","whisper-en","auth-worker-1","auth-server-1","auth-authentik_ldap-1","auth-redis-1","auth-postgresql-1","nginx-app-1"]

cmnd = f"curl -H 'Authorization: Bearer l4c1j4yd33Du5lo' 192.168.77.238:8094/v1/update"
print(cmnd)
logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd)

if _START:
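
send_mqtt_message() relies on a client, broker and topic defined outside this hunk. A hedged sketch of how that client is presumably set up with paho-mqtt (1.x-style constructor); the broker address below is a placeholder, while the client id, credentials and topic echo values visible elsewhere in this diff:

# Hedged sketch (not part of the commit): probable paho-mqtt setup behind
# send_mqtt_message(). Broker address is a placeholder.
import json
import paho.mqtt.client as mqtt

broker = "broker.example.lan"          # placeholder, not from the diff
topic = "sectorq/amd/backups"          # topic name appears in a later hunk
client = mqtt.Client("dasdasdasd333")  # client_id appears earlier in the diff
client.username_pw_set("jaydee", "jaydee1")

def send_mqtt_message(msg):
    try:
        client.connect(broker, 1883, 60)
        client.publish(topic, json.dumps(msg))
        client.disconnect()
    except Exception:
        print("Failed to send")
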
@@ -176,7 +192,7 @@ if _STOP:
cmnd = "docker ps"
status, running_containers = subprocess.getstatusoutput(cmnd)

print(running_containers)
logging.info(running_containers)
for c in running_containers.splitlines():
print(c.split()[-1])
if c.split()[-1] == "watchtower-watchtower-1":
@@ -185,7 +201,7 @@ if _STOP:
status, running_containers = subprocess.getstatusoutput(cmnd)

if _RESTORE:
print("Starting Restore")
logging.info("Starting Restore")
now = datetime.datetime.now()
STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
if _APP == "all":
@@ -196,7 +212,7 @@ if _RESTORE:
cmnd = "ssh root@amd.home.lan 'ls /mnt/raid/backup/m-server/docker_data/latest'"
status, output = subprocess.getstatusoutput(cmnd)
_APP = output.splitlines()
print(_APP)
logging.info(_APP)
#input("????")
else:
_APP = _APP.split(",")
@@ -222,7 +238,7 @@ if _RESTORE:
dates = output.splitlines()
n = 1
for i in dates:
print(f"{n} - {i}" )
logging.info(f"{n} - {i}" )
n += 1

ans = input("Pick a backup to restore : ")
@@ -230,7 +246,7 @@ if _RESTORE:


if app == "fail2ban":
print("?>?????")
logging.info("?>?????")
NEW_BACKUP_DIR = f"/backup/m-server/fail2ban/{_DATE}/"
SOURCE_DIR = f"/etc/fail2ban"
else:
@@ -247,21 +263,21 @@ if _RESTORE:

msg = {"status":"started","bak_name":app,"start_time":DATETIME,"end_time":"in progress", "progress":0}
send_mqtt_message(msg)
print("Create backup dir")
print(cmnd)
logging.info("Create backup dir")
logging.info(cmnd)


#cmnd = "rsync -av --delete {}/ --link-dest {} --exclude=\".cache\" {}".format(SOURCE_DIR, LATEST_LINK, BACKUP_PATH)


if app == "heimdall":
print("Stopping docker")
logging.info("Stopping docker")
cmnd = "docker stop heimdall"
status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
print(cmnd)
print("Sync files")
logging.info(cmnd)
logging.info("Sync files")
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
@@ -269,12 +285,12 @@ if _RESTORE:
entries = ["Home Assistant","Nginx Proxy Manager","Portainer","Roundcube","Authentik","Kestra"]
for e in entries:
cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"SELECT url FROM items WHERE title = '{e}'\""
print(cmnd)
logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
regex = re.compile(r'[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}')
contents = re.sub(regex, IP , output)
cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"UPDATE items SET url = '{contents}' WHERE title = '{e}'\""
print(cmnd)
logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
# cmnd = "docker start heimdall"
# status, output = subprocess.getstatusoutput(cmnd)
@@ -283,38 +299,38 @@ if _RESTORE:


if app == "ha":
print("Stopping docker")
logging.info("Stopping docker")
cmnd = "docker stop heimdall"
status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
print(cmnd)
print("Sync files")
logging.info(cmnd)
logging.info("Sync files")
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd)
print("Start docker")
logging.info("Start docker")
# cmnd = "docker start heimdall"
# status, output = subprocess.getstatusoutput(cmnd)
elif app == "fail2ban":
print("Stopping docker")
logging.info("Stopping docker")
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
print(cmnd)
print("Sync files")
logging.info(cmnd)
logging.info("Sync files")
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd)
print("Start docker")
logging.info("Start docker")
# cmnd = "docker start heimdall"
# status, output = subprocess.getstatusoutput(cmnd)
elif app == "homepage":
print("Stopping docker")
logging.info("Stopping docker")
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
print(cmnd)
logging.info(cmnd)

if _TEST:
ans = input("continue?") or "n"
@@ -326,15 +342,15 @@ if _RESTORE:
try:
loaded = yaml.load(stream, Loader=yaml.FullLoader)
except yaml.YAMLError as exc:
print(exc)
logging.info(exc)

# Modify the fields from the dict
#loaded['logo']['icon'] = "/images/morefine2.png"
print(json.dumps(loaded, indent=2))
logging.info(json.dumps(loaded, indent=2))
i = 0
for y in loaded:
print(i)
print(y)
logging.info(i)
logging.info(y)

if "logo" in y:
if host == "rpi5.home.lan":
@@ -346,7 +362,7 @@ if _RESTORE:
i+=1

# Save it again
print(f"writing to file {file}")
logging.info(f"writing to file {file}")
with open(file, 'w') as stream:
try:
yaml.dump(loaded, stream, default_flow_style=False)
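
The homepage restore above performs a YAML load-modify-dump round trip with PyYAML. A hedged, self-contained sketch of that pattern; the file path and the edited value are placeholders, while the commit itself rewrites a "logo" entry depending on the host:

# Hedged sketch (not part of the commit): PyYAML round trip as used by the
# homepage restore. Path and new value are placeholders.
import yaml

file = "/tmp/settings.yaml"  # placeholder path

with open(file) as stream:
    loaded = yaml.load(stream, Loader=yaml.FullLoader)

for entry in loaded:  # the homepage config is a list of dicts
    if isinstance(entry, dict) and "logo" in entry:
        entry["logo"]["icon"] = "/images/example.png"  # placeholder value

with open(file, "w") as stream:
    yaml.dump(loaded, stream, default_flow_style=False)
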
@@ -356,17 +372,17 @@ if _RESTORE:



print("Start docker")
logging.info("Start docker")
# cmnd = "docker start heimdall"
# status, output = subprocess.getstatusoutput(cmnd)
elif app == "nginx1":
print("Stopping docker")
logging.info("Stopping docker")
cmnd = "docker stop nginx-app-1"
status, output = subprocess.getstatusoutput(cmnd)
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
print(cmnd)
print("Sync files")
logging.info(cmnd)
logging.info("Sync files")
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
@@ -374,33 +390,33 @@ if _RESTORE:
domains = ["sectorq.eu","gitlab.sectorq.eu","ha.sectorq.eu","mail.sectorq.eu","pw.sectorq.eu","semaphore.sectorq.eu","kestra.sectorq.eu","auth.sectorq.eu"]
for d in domains:
cmnd = f'sqlite3 /share/docker_data/nginx/data/database.sqlite "UPDATE proxy_host SET forward_host = \'{IP}\' WHERE domain_names = \'[\\"{d}\\"]\'"'
print(cmnd)
logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd)

cmnd = 'egrep -l "# bazarr.sectorq.eu|# gitea.sectorq.eu|# jf.sectorq.eu|# kestra.sectorq.eu|# auth.sectorq.eu|# ha.sectorq.eu|# pw.sectorq.eu|# semaphore.sectorq.eu|# sectorq.eu|# gitlab.sectorq.eu|# ha.sectorq.eu" /share/docker_data/nginx/data/nginx/proxy_host/*'
status, output = subprocess.getstatusoutput(cmnd)
print(output.splitlines())
logging.info(output.splitlines())
for file in output.splitlines():
print(file)
logging.info(file)
f = open(file)
contents = f.read()
f.close()
regex = re.compile(r'\n\s+set \$server\s+\"\w+.\w+.\w+.\w+\";')
contents = re.sub(regex, f'\n set $server \"{IP}\";', contents)
#print(contents)
print(regex)
logging.info(regex)
f = open(file, "w")
contents = f.write(contents)
f.close()
status, output = subprocess.getstatusoutput(cmnd)
print("Starting docker")
logging.info("Starting docker")
# cmnd = "docker start nginx-app-1"
# status, output = subprocess.getstatusoutput(cmnd)
else:
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
ans = "y"
print(cmnd)
print("Sync files")
logging.info(cmnd)
logging.info("Sync files")
if _TEST:
ans = input("continue?") or "n"
if ans == "y" and _EXECUTE:
@@ -412,9 +428,9 @@ if _RESTORE:

now = datetime.datetime.now()
ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S")
print("Sending finished status")
logging.info("Sending finished status")
msg = {"mode":_MODE,"status":"finished","bak_name":"complete","start_time":STARTTIME,"end_time":ENDJOB,"progress":0,"used_space":"?"}
print(msg)
logging.info(msg)
send_mqtt_message(msg)

if _MODE == "auto":
@@ -429,7 +445,7 @@ if _BACKUP:
directory = '/backups/'
count = len(fnmatch.filter(os.listdir(directory), '*'))

print('File Count:', count)
logging.info('File Count:', count)
if count == 0:
time.sleep(10)
continue
@@ -444,14 +460,14 @@ if _BACKUP:

for filename in os.scandir(directory):
if filename.is_file():
print(filename.path)
print(filename.name)
logging.info(filename.path)
logging.info(filename.name)
host = filename.name
print("Backup")
logging.info("Backup")
for b in backups[host]["jobs"]:
topic = "sectorq/amd/backups"
if not backups[host]["jobs"][b]["active"]:
print("Backup {} is not active!".format(b))
logging.info("Backup {} is not active!".format(b))
msg = {"status":"inactive","bak_name":b,"start_time":"inactive","end_time":"inactive","progress":0}
send_mqtt_message(msg)
continue
@@ -479,12 +495,12 @@ if _BACKUP:
client.disconnect()

cmnd = "mkdir -p " + NEW_BACKUP_DIR
print(cmnd)
logging.info(cmnd)
if _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd)
print(output)
print(status)
print("Create backup dir")
logging.info(output)
logging.info(status)
logging.info("Create backup dir")



@@ -495,8 +511,8 @@ if _BACKUP:
cmnd = f"rsync -avz --delete {BACKUP_DIR} --link-dest {FULL_BACKUP_LATEST} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" {NEW_BACKUP_DIR}"

ans = "y"
print(cmnd)
print("Sync files")
logging.info(cmnd)
logging.info("Sync files")
#input("??????")
if _TEST:

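
The rsync command above uses --link-dest so that files unchanged since the previous snapshot are hard-linked instead of copied: every dated directory looks like a full backup, but only changed files take extra space. A hedged sketch of that pattern with placeholder paths:

# Hedged sketch (not part of the commit): the --link-dest hardlink pattern
# used by the backup step. All paths below are placeholders.
import subprocess

source_dir = "/share/docker_data/"           # placeholder
backup_root = "/mnt/raid/backup/m-server/"   # placeholder
latest = backup_root + "latest"
new_snapshot = backup_root + "2025-01-01_00:00:00"

cmnd = (
    f"rsync -avz --delete {source_dir} "
    f"--link-dest {latest} {new_snapshot}"
)
status, output = subprocess.getstatusoutput(cmnd)
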
@@ -515,8 +531,8 @@ if _BACKUP:

cmnd = f"rm -rf {FULL_BACKUP_LATEST}"

print(cmnd)
print("Removing latest link")
logging.info(cmnd)
logging.info("Removing latest link")
# input("????")
if _EXECUTE:
status, output = subprocess.getstatusoutput(cmnd)
@@ -524,7 +540,7 @@ if _BACKUP:
cmnd = f"cd {BACKUP_ROOT}; ln -s initial latest"
else:
cmnd = f"cd {BACKUP_ROOT}; ln -s {DATETIME} latest"
print("Creating new latest link")
logging.info("Creating new latest link")
#print(cmnd)
# input("????")
if _EXECUTE:
@@ -532,7 +548,7 @@ if _BACKUP:


#Remove old
print("Removing old dirs")
logging.info("Removing old dirs")
# input("????")
#cmnd = "find {} -maxdepth 1 -type d -mtime +30 -exec rm -rf {{}} \;".format(BACKUP_DIR)
cmnd = f"cd {BACKUP_ROOT} find ./ -maxdepth 1 -type d -mmin +30 -exec rm -rf {{}} \\;"
@@ -547,23 +563,37 @@ if _BACKUP:
msg = {"mode":_MODE, "status":"finished","bak_name":"complete","host":host,"cur_job":b,"start_time":ENDTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
send_mqtt_message(msg)

print("Getting size of FS")
logging.info("Getting size of FS")
cmnd = "df -h /mnt/raid|awk '{ print $3 }'|tail -1"
print(cmnd)
logging.info(cmnd)
status, output = subprocess.getstatusoutput(cmnd)
used_space = (output.split())[0]
now = datetime.datetime.now()
ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S")
print("Size : {}".format(used_space))
print("Sending finished status")
logging.info("Size : {}".format(used_space))
logging.info("Sending finished status")
#msg = {"mode":_MODE,"status":"finished","bak_name":"complete","start_time":STARTTIME,"end_time":ENDJOB,"progress":0,"used_space":used_space}
msg = {"mode":_MODE, "status":"finished","bak_name":"complete","host":host,"cur_job":b,"start_time":STARTTIME,"end_time":ENDTIME,"progress":0,"finished":",".join(finished),"used_space":used_space}
print(msg)

logging.info(msg)

send_mqtt_message(msg)

os.remove(filename.path)

for s in servers:
if s == "m-server.home.lan":
continue
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
# Add SSH host key automatically if needed.
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# Connect to router using username/password authentication.
ssh.connect(s,
username="jd",
look_for_keys=True,
allow_agent=False,
key_filename="/root/.ssh/id_rsa")
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command('touch /tmp/lala')
ssh.close()
# if _MODE == "auto":
#     hostup = True
#     cmnd = "ssh root@omv.home.lan 'systemctl suspend &'"
@@ -594,5 +624,19 @@ if _BACKUP:
# print(x.text)
# except:
#     pass


if _SSH_TEST:
for s in servers:
if s == "m-server.home.lan":
continue
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
# Add SSH host key automatically if needed.
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# Connect to router using username/password authentication.
ssh.connect(s,
username="jd",
look_for_keys=True,
allow_agent=False,
key_filename="/root/.ssh/id_rsa")
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command('touch /tmp/lala')
ssh.close()
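
The new _SSH_TEST block above runs the same key-based paramiko connection test that the backup path already performs. A hedged sketch of that check, wrapped so an unreachable server is reported instead of raising; the server list, user and key path mirror the diff:

# Hedged sketch (not part of the commit): per-server SSH connectivity check
# with paramiko, reporting failures instead of aborting.
import paramiko

servers = ["rpi5.home.lan", "nas.home.lan", "rack.home.lan"]

for s in servers:
    ssh = paramiko.SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(s, username="jd", look_for_keys=True,
                    allow_agent=False, key_filename="/root/.ssh/id_rsa")
        ssh.exec_command("touch /tmp/lala")
        print(f"{s}: ssh ok")
    except Exception as exc:
        print(f"{s}: ssh failed ({exc})")
    finally:
        ssh.close()
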