mirror of
https://gitlab.sectorq.eu/jaydee/omv_backup.git
synced 2025-07-01 15:48:33 +02:00
Compare commits
47 Commits
7eb92d7c6a
...
6b7812d084
Author | SHA1 | Date | |
---|---|---|---|
6b7812d084 | |||
b9e71fee88 | |||
142d46a902 | |||
bfaa102940 | |||
1388e92c2c | |||
35a7402bad | |||
170ae431a1 | |||
1b481e9a81 | |||
b8f27f9ab2 | |||
54f8a9d29d | |||
93b59d34df | |||
fb3fa9d306 | |||
9f7b851ce8 | |||
55d494a02a | |||
cca2137c0c | |||
d8ca1bfbd3 | |||
726c2c3e36 | |||
669d821a49 | |||
5e98e88d4e | |||
82f741faf9 | |||
972f799f96 | |||
df3ddea2a9 | |||
7a6c2de3d1 | |||
e7d222033d | |||
82b8053e2c | |||
c4ab70bf0f | |||
609f94e659 | |||
5b20f9d190 | |||
002c82f549 | |||
4f2e8e00b0 | |||
3c15ca0222 | |||
a9a1c3c0fa | |||
e1ee9febac | |||
f9ae8bc1f9 | |||
6adaaa19d5 | |||
c1b43317a0 | |||
c17e10d455 | |||
9f1992f098 | |||
b9c99befab | |||
aaf9ab523b | |||
d08b79401b | |||
8abe9d0678 | |||
3566afbe9e | |||
f9bc564b96 | |||
a0eb6f544d | |||
5d07cca0e2 | |||
bbf0e360ef |
9
.gitlab-ci.yml
Normal file
9
.gitlab-ci.yml
Normal file
@ -0,0 +1,9 @@
|
||||
stages:
|
||||
- build
|
||||
|
||||
build_job:
|
||||
stage: build
|
||||
script:
|
||||
- echo "Running build pipeline"
|
||||
|
||||
|
0
docker_backups.py
Normal file → Executable file
0
docker_backups.py
Normal file → Executable file
146
omv_backup.log
Executable file
146
omv_backup.log
Executable file
@ -0,0 +1,146 @@
|
||||
04/06/2025 01:28:15 AM : INFO : script started
|
||||
04/06/2025 01:28:15 AM : INFO : Test connection
|
||||
04/06/2025 01:28:15 AM : INFO : False
|
||||
04/06/2025 01:28:34 AM : INFO : script started
|
||||
04/06/2025 01:28:34 AM : INFO : Test connection
|
||||
04/06/2025 01:28:34 AM : INFO : False
|
||||
04/06/2025 01:28:34 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'rpi5.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:28:34 AM : INFO : Sync rpi5.home.lan
|
||||
04/06/2025 01:28:34 AM : INFO : Connected (version 2.0, client OpenSSH_9.2p1)
|
||||
04/06/2025 01:29:00 AM : INFO : script started
|
||||
04/06/2025 01:29:00 AM : INFO : Test connection
|
||||
04/06/2025 01:29:00 AM : INFO : False
|
||||
04/06/2025 01:29:00 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'rpi5.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:29:00 AM : INFO : Sync rpi5.home.lan
|
||||
04/06/2025 01:29:00 AM : INFO : Connected (version 2.0, client OpenSSH_9.2p1)
|
||||
04/06/2025 01:29:44 AM : INFO : script started
|
||||
04/06/2025 01:29:44 AM : INFO : Test connection
|
||||
04/06/2025 01:29:44 AM : INFO : False
|
||||
04/06/2025 01:29:44 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'rpi5.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:29:44 AM : INFO : Sync rpi5.home.lan
|
||||
04/06/2025 01:29:44 AM : INFO : Connected (version 2.0, client OpenSSH_9.2p1)
|
||||
04/06/2025 01:29:44 AM : INFO : Authentication (publickey) successful!
|
||||
04/06/2025 01:29:44 AM : INFO : total 148
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwx------ 14 root root 4096 Apr 3 22:07 .
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwxr-xr-x 24 root root 4096 Dec 16 21:29 ..
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwxr-xr-x 3 root root 4096 Aug 18 2024 .ansible
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : -rw------- 1 root root 16245 Apr 1 09:42 .bash_history
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : -rw-r--r-- 1 root root 571 Apr 10 2021 .bashrc
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwx------ 4 root root 4096 Dec 9 22:06 .cache
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwxr-xr-x 2 root root 4096 Mar 11 19:04 cert
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwx------ 5 root root 4096 Dec 16 20:05 .config
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : -rw-r--r-- 1 root root 996 Nov 9 16:17 crontab?raw=true
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwxr-xr-x 3 root root 4096 Sep 4 2024 .dotnet
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : -rw-r--r-- 1 root root 21596 Aug 17 2024 get-docker.sh
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwx------ 3 root root 4096 Dec 19 11:26 .gnupg
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : -rw-r--r-- 1 root root 178 Apr 25 2023 index.html
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : -rw------- 1 root root 20 Apr 3 22:07 .lesshst
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwxr-xr-x 3 root root 4096 Nov 14 18:20 .local
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : -rw-r--r-- 1 root root 12653 Dec 16 21:27 omv.py
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : -rw-r--r-- 1 root root 161 Jul 9 2019 .profile
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwxr-xr-x 6 root root 4096 Dec 9 22:55 python
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : -rw------- 1 root root 300 Dec 16 22:45 .python_history
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwx------ 2 root root 4096 Mar 11 18:23 .ssh
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : -rw-r--r-- 1 root root 0 Dec 16 20:54 .sudo_as_admin_successful
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwxr-xr-x 5 root root 4096 Dec 9 22:06 venv
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwx------ 3 root root 4096 Jul 4 2024 .vnc
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : drwxr-xr-x 5 root root 4096 Nov 14 09:53 .vscode-server
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : -rw-r--r-- 1 root root 268 Nov 12 20:10 .wget-hsts
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : -rw-r--r-- 1 root root 6008 Jun 3 2024 zabbix-release_7.0-1+debian11_all.deb
|
||||
|
||||
04/06/2025 01:29:44 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'nas.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:29:44 AM : INFO : Sync nas.home.lan
|
||||
04/06/2025 01:29:44 AM : INFO : Connected (version 2.0, client OpenSSH_9.9)
|
||||
04/06/2025 01:29:44 AM : INFO : Authentication (publickey) successful!
|
||||
04/06/2025 01:30:37 AM : INFO : script started
|
||||
04/06/2025 01:30:37 AM : INFO : Test connection
|
||||
04/06/2025 01:30:37 AM : INFO : False
|
||||
04/06/2025 01:30:37 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'rpi5.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:30:37 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'nas.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:30:37 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'rack.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:30:37 AM : INFO : Sync rack.home.lan
|
||||
04/06/2025 01:30:37 AM : INFO : Connected (version 2.0, client OpenSSH_9.2p1)
|
||||
04/06/2025 01:30:37 AM : INFO : Authentication (publickey) failed.
|
||||
04/06/2025 01:30:37 AM : ERROR : Unknown exception: q must be exactly 160, 224, or 256 bits long
|
||||
04/06/2025 01:30:37 AM : ERROR : Traceback (most recent call last):
|
||||
04/06/2025 01:30:37 AM : ERROR : File "/home/jd/projects/omv_backup/venv/lib/python3.11/site-packages/paramiko/transport.py", line 2262, in run
|
||||
04/06/2025 01:30:37 AM : ERROR : handler(m)
|
||||
04/06/2025 01:30:37 AM : ERROR : File "/home/jd/projects/omv_backup/venv/lib/python3.11/site-packages/paramiko/auth_handler.py", line 404, in _parse_service_accept
|
||||
04/06/2025 01:30:37 AM : ERROR : sig = self.private_key.sign_ssh_data(blob, algorithm)
|
||||
04/06/2025 01:30:37 AM : ERROR : ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
04/06/2025 01:30:37 AM : ERROR : File "/home/jd/projects/omv_backup/venv/lib/python3.11/site-packages/paramiko/dsskey.py", line 120, in sign_ssh_data
|
||||
04/06/2025 01:30:37 AM : ERROR : ).private_key(backend=default_backend())
|
||||
04/06/2025 01:30:37 AM : ERROR : ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
04/06/2025 01:30:37 AM : ERROR : ValueError: q must be exactly 160, 224, or 256 bits long
|
||||
04/06/2025 01:30:37 AM : ERROR :
|
||||
04/06/2025 01:31:44 AM : INFO : script started
|
||||
04/06/2025 01:31:44 AM : INFO : Test connection
|
||||
04/06/2025 01:31:44 AM : INFO : False
|
||||
04/06/2025 01:31:44 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'rpi5.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:31:44 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'nas.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:31:44 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'rack.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:31:44 AM : INFO : Sync rack.home.lan
|
||||
04/06/2025 01:31:44 AM : INFO : Connected (version 2.0, client OpenSSH_9.2p1)
|
||||
04/06/2025 01:31:44 AM : INFO : Authentication (publickey) failed.
|
||||
04/06/2025 01:31:44 AM : ERROR : Unknown exception: q must be exactly 160, 224, or 256 bits long
|
||||
04/06/2025 01:31:44 AM : ERROR : Traceback (most recent call last):
|
||||
04/06/2025 01:31:44 AM : ERROR : File "/home/jd/projects/omv_backup/venv/lib/python3.11/site-packages/paramiko/transport.py", line 2262, in run
|
||||
04/06/2025 01:31:44 AM : ERROR : handler(m)
|
||||
04/06/2025 01:31:44 AM : ERROR : File "/home/jd/projects/omv_backup/venv/lib/python3.11/site-packages/paramiko/auth_handler.py", line 404, in _parse_service_accept
|
||||
04/06/2025 01:31:44 AM : ERROR : sig = self.private_key.sign_ssh_data(blob, algorithm)
|
||||
04/06/2025 01:31:44 AM : ERROR : ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
04/06/2025 01:31:44 AM : ERROR : File "/home/jd/projects/omv_backup/venv/lib/python3.11/site-packages/paramiko/dsskey.py", line 120, in sign_ssh_data
|
||||
04/06/2025 01:31:44 AM : ERROR : ).private_key(backend=default_backend())
|
||||
04/06/2025 01:31:44 AM : ERROR : ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
04/06/2025 01:31:44 AM : ERROR : ValueError: q must be exactly 160, 224, or 256 bits long
|
||||
04/06/2025 01:31:44 AM : ERROR :
|
||||
04/06/2025 01:32:42 AM : INFO : script started
|
||||
04/06/2025 01:32:42 AM : INFO : Test connection
|
||||
04/06/2025 01:32:42 AM : INFO : False
|
||||
04/06/2025 01:32:52 AM : INFO : script started
|
||||
04/06/2025 01:32:52 AM : INFO : Test connection
|
||||
04/06/2025 01:32:52 AM : INFO : False
|
||||
04/06/2025 01:32:52 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'rpi5.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:32:52 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'nas.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:32:52 AM : INFO : {'mode': 'manual', 'status': 'restore', 'bak_name': 's', 'host': 'rack.home.lan', 'cur_job': 'aaa', 'start_time': 1, 'end_time': 1, 'progress': 0, 'finished': 1, 'used_space': 1}
|
||||
04/06/2025 01:32:52 AM : INFO : Sync rack.home.lan
|
||||
04/06/2025 01:32:52 AM : INFO : Connected (version 2.0, client OpenSSH_9.2p1)
|
||||
04/06/2025 01:32:52 AM : INFO : Authentication (publickey) failed.
|
||||
04/06/2025 01:32:52 AM : ERROR : Unknown exception: q must be exactly 160, 224, or 256 bits long
|
||||
04/06/2025 01:32:52 AM : ERROR : Traceback (most recent call last):
|
||||
04/06/2025 01:32:52 AM : ERROR : File "/home/jd/projects/omv_backup/venv/lib/python3.11/site-packages/paramiko/transport.py", line 2262, in run
|
||||
04/06/2025 01:32:52 AM : ERROR : handler(m)
|
||||
04/06/2025 01:32:52 AM : ERROR : File "/home/jd/projects/omv_backup/venv/lib/python3.11/site-packages/paramiko/auth_handler.py", line 404, in _parse_service_accept
|
||||
04/06/2025 01:32:52 AM : ERROR : sig = self.private_key.sign_ssh_data(blob, algorithm)
|
||||
04/06/2025 01:32:52 AM : ERROR : ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
04/06/2025 01:32:52 AM : ERROR : File "/home/jd/projects/omv_backup/venv/lib/python3.11/site-packages/paramiko/dsskey.py", line 120, in sign_ssh_data
|
||||
04/06/2025 01:32:52 AM : ERROR : ).private_key(backend=default_backend())
|
||||
04/06/2025 01:32:52 AM : ERROR : ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
04/06/2025 01:32:52 AM : ERROR : ValueError: q must be exactly 160, 224, or 256 bits long
|
||||
04/06/2025 01:32:52 AM : ERROR :
|
0
omv_backup.py
Normal file → Executable file
0
omv_backup.py
Normal file → Executable file
0
omv_backup_v2.py
Normal file → Executable file
0
omv_backup_v2.py
Normal file → Executable file
0
omv_backup_v2.pyw
Normal file → Executable file
0
omv_backup_v2.pyw
Normal file → Executable file
318
omv_backup_v3.py
Normal file → Executable file
318
omv_backup_v3.py
Normal file → Executable file
@ -7,6 +7,7 @@ import json
|
||||
import time
|
||||
import socket
|
||||
import subprocess
|
||||
from subprocess import Popen, PIPE, CalledProcessError
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
@ -14,10 +15,23 @@ import platform
|
||||
import requests
|
||||
import fnmatch
|
||||
import yaml
|
||||
import paramiko
|
||||
file_path = os.path.realpath(__file__)
|
||||
dir_path = os.path.dirname(file_path)
|
||||
|
||||
print(file_path)
|
||||
print(dir_path)
|
||||
os.chdir(dir_path)
|
||||
from wakeonlan import send_magic_packet
|
||||
pid = os.getpid()
|
||||
|
||||
|
||||
def is_port_open(host, port):
|
||||
try:
|
||||
sock = socket.create_connection((host, port))
|
||||
sock.close()
|
||||
return True
|
||||
except socket.error:
|
||||
return False
|
||||
servers = ["rpi5.home.lan","nas.home.lan","rack.home.lan","m-server.home.lan"]
|
||||
host = platform.node().lower()
|
||||
#input(host)
|
||||
cmnd = "ps -ef|grep omv_backups.py|grep -v grep |grep -v {}|wc -l".format(pid)
|
||||
@ -52,22 +66,24 @@ mqtt_username = 'jaydee'
|
||||
mqtt_password = 'jaydee1'
|
||||
print("1")
|
||||
try:
|
||||
opts, args = getopt.getopt(sys.argv[1:], "amftDr:bd:sSO", ["command=", "help", "output="])
|
||||
opts, args = getopt.getopt(sys.argv[1:], "TamftDr:bd:sSOl:", ["command=", "help", "output="])
|
||||
except getopt.GetoptError as err:
|
||||
#usage()
|
||||
sys.exit(2)
|
||||
output = None
|
||||
# QJ : getopts
|
||||
_MODE = "manual"
|
||||
_FIRST = _TEST = _RESTORE = _BACKUP = _SYNC = _START = _STOP = False
|
||||
_FIRST = _TEST = _RESTORE = _BACKUP = _SYNC = _START = _STOP = _SSH_TEST = False
|
||||
_EXECUTE = True
|
||||
_DATE = "pick"
|
||||
|
||||
_LOG_LEVEL = ""
|
||||
for o, a in opts:
|
||||
if o == "-a":
|
||||
_MODE = "auto"
|
||||
elif o in ("-m", "--manual"):
|
||||
_MODE = "manual"
|
||||
elif o in ("-l", "--level"):
|
||||
_LOG_LEVEL = a.upper()
|
||||
elif o in ("-f", "--first"):
|
||||
_FIRST = True
|
||||
elif o in ("-d", "--date"):
|
||||
@ -88,7 +104,24 @@ for o, a in opts:
|
||||
_BACKUP = True
|
||||
elif o in ("-D", "--dry"):
|
||||
_EXECUTE = False
|
||||
|
||||
elif o in ("-T", "--dry"):
|
||||
_SSH_TEST = True
|
||||
|
||||
LOG_FILE = "omv_backup.log"
|
||||
if _LOG_LEVEL == "DEBUG":
|
||||
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
|
||||
logging.debug('using debug loging')
|
||||
elif _LOG_LEVEL == "ERROR":
|
||||
logging.basicConfig(filename=LOG_FILE, level=logging.ERROR, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
|
||||
logging.info('using error loging')
|
||||
elif _LOG_LEVEL == "SCAN":
|
||||
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
|
||||
logging.info('using error loging')
|
||||
else:
|
||||
logging.basicConfig(filename=LOG_FILE, level=logging.INFO, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
|
||||
logging.info("script started")
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
print("2")
|
||||
client_id = "dasdasdasd333"
|
||||
try:
|
||||
@ -146,25 +179,25 @@ BACKUP_FS = "/media/backup/"
|
||||
BACKUP_HOST = "amd.home.lan"
|
||||
#BACKUP_HOST = "morefine.home.lan"
|
||||
|
||||
print("Test connection")
|
||||
print("3")
|
||||
logging.info("Test connection")
|
||||
hm = socket.gethostbyaddr(BACKUP_HOST)
|
||||
|
||||
print("Starting")
|
||||
print(_RESTORE)
|
||||
logging.info(_RESTORE)
|
||||
def send_mqtt_message(msg):
|
||||
try:
|
||||
client.connect(broker,1883,60)
|
||||
client.publish(topic, json.dumps(msg))
|
||||
client.disconnect()
|
||||
except:
|
||||
except ValueError as e:
|
||||
logging.error("Failed to send")
|
||||
print("Failed to send")
|
||||
print(e)
|
||||
|
||||
if _SYNC:
|
||||
containers = ["HomeAssistant","webhub-web-1","heimdall","pihole","mosquitto-mosquitto-1","mailu3-redis-1","mailu3-webmail-1","mailu3-resolver-1","mailu3-antispam-1","mailu3-webdav-1","mailu3-smtp-1","mailu3-oletools-1","mailu3-front-1","mailu3-fetchmail-1","mailu3-imap-1","matter-server","piper-en","openwakeword","whisper-en","auth-worker-1","auth-server-1","auth-authentik_ldap-1","auth-redis-1","auth-postgresql-1","nginx-app-1"]
|
||||
|
||||
cmnd = f"curl -H 'Authorization: Bearer l4c1j4yd33Du5lo' 192.168.77.238:8094/v1/update"
|
||||
print(cmnd)
|
||||
logging.info(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
if _START:
|
||||
@ -176,7 +209,7 @@ if _STOP:
|
||||
cmnd = "docker ps"
|
||||
status, running_containers = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
print(running_containers)
|
||||
logging.info(running_containers)
|
||||
for c in running_containers.splitlines():
|
||||
print(c.split()[-1])
|
||||
if c.split()[-1] == "watchtower-watchtower-1":
|
||||
@ -185,6 +218,7 @@ if _STOP:
|
||||
status, running_containers = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
if _RESTORE:
|
||||
logging.info("Starting Restore")
|
||||
print("Starting Restore")
|
||||
now = datetime.datetime.now()
|
||||
STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
|
||||
@ -196,7 +230,7 @@ if _RESTORE:
|
||||
cmnd = "ssh root@amd.home.lan 'ls /mnt/raid/backup/m-server/docker_data/latest'"
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
_APP = output.splitlines()
|
||||
print(_APP)
|
||||
logging.info(_APP)
|
||||
#input("????")
|
||||
else:
|
||||
_APP = _APP.split(",")
|
||||
@ -222,7 +256,7 @@ if _RESTORE:
|
||||
dates = output.splitlines()
|
||||
n = 1
|
||||
for i in dates:
|
||||
print(f"{n} - {i}" )
|
||||
logging.info(f"{n} - {i}" )
|
||||
n += 1
|
||||
|
||||
ans = input("Pick a backup to restore : ")
|
||||
@ -230,7 +264,7 @@ if _RESTORE:
|
||||
|
||||
|
||||
if app == "fail2ban":
|
||||
print("?>?????")
|
||||
logging.info("?>?????")
|
||||
NEW_BACKUP_DIR = f"/backup/m-server/fail2ban/{_DATE}/"
|
||||
SOURCE_DIR = f"/etc/fail2ban"
|
||||
else:
|
||||
@ -247,34 +281,36 @@ if _RESTORE:
|
||||
|
||||
msg = {"status":"started","bak_name":app,"start_time":DATETIME,"end_time":"in progress", "progress":0}
|
||||
send_mqtt_message(msg)
|
||||
print("Create backup dir")
|
||||
print(cmnd)
|
||||
logging.info("Create backup dir")
|
||||
logging.info(cmnd)
|
||||
|
||||
|
||||
#cmnd = "rsync -av --delete {}/ --link-dest {} --exclude=\".cache\" {}".format(SOURCE_DIR, LATEST_LINK, BACKUP_PATH)
|
||||
|
||||
|
||||
if app == "heimdall":
|
||||
print("Stopping docker")
|
||||
logging.info("Stopping docker")
|
||||
cmnd = "docker stop heimdall"
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
|
||||
ans = "y"
|
||||
print(cmnd)
|
||||
print("Sync files")
|
||||
logging.info(cmnd)
|
||||
logging.info("Sync files")
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
|
||||
entries = ["Home Assistant","Nginx Proxy Manager","Portainer","Roundcube","Authentik","Kestra"]
|
||||
for e in entries:
|
||||
cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"SELECT url FROM items WHERE title = '{e}'\""
|
||||
print(cmnd)
|
||||
logging.info(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
regex = re.compile(r'[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}')
|
||||
contents = re.sub(regex, IP , output)
|
||||
cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"UPDATE items SET url = '{contents}' WHERE title = '{e}'\""
|
||||
print(cmnd)
|
||||
logging.info(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
# cmnd = "docker start heimdall"
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
@ -283,38 +319,38 @@ if _RESTORE:
|
||||
|
||||
|
||||
if app == "ha":
|
||||
print("Stopping docker")
|
||||
logging.info("Stopping docker")
|
||||
cmnd = "docker stop heimdall"
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
|
||||
ans = "y"
|
||||
print(cmnd)
|
||||
print("Sync files")
|
||||
logging.info(cmnd)
|
||||
logging.info("Sync files")
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
print("Start docker")
|
||||
logging.info("Start docker")
|
||||
# cmnd = "docker start heimdall"
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
elif app == "fail2ban":
|
||||
print("Stopping docker")
|
||||
logging.info("Stopping docker")
|
||||
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
|
||||
ans = "y"
|
||||
print(cmnd)
|
||||
print("Sync files")
|
||||
logging.info(cmnd)
|
||||
logging.info("Sync files")
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
print("Start docker")
|
||||
logging.info("Start docker")
|
||||
# cmnd = "docker start heimdall"
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
elif app == "homepage":
|
||||
print("Stopping docker")
|
||||
logging.info("Stopping docker")
|
||||
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
|
||||
ans = "y"
|
||||
print(cmnd)
|
||||
logging.info(cmnd)
|
||||
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
@ -326,15 +362,15 @@ if _RESTORE:
|
||||
try:
|
||||
loaded = yaml.load(stream, Loader=yaml.FullLoader)
|
||||
except yaml.YAMLError as exc:
|
||||
print(exc)
|
||||
logging.info(exc)
|
||||
|
||||
# Modify the fields from the dict
|
||||
#loaded['logo']['icon'] = "/images/morefine2.png"
|
||||
print(json.dumps(loaded, indent=2))
|
||||
logging.info(json.dumps(loaded, indent=2))
|
||||
i = 0
|
||||
for y in loaded:
|
||||
print(i)
|
||||
print(y)
|
||||
logging.info(i)
|
||||
logging.info(y)
|
||||
|
||||
if "logo" in y:
|
||||
if host == "rpi5.home.lan":
|
||||
@ -346,7 +382,7 @@ if _RESTORE:
|
||||
i+=1
|
||||
|
||||
# Save it again
|
||||
print(f"writing to file {file}")
|
||||
logging.info(f"writing to file {file}")
|
||||
with open(file, 'w') as stream:
|
||||
try:
|
||||
yaml.dump(loaded, stream, default_flow_style=False)
|
||||
@ -356,17 +392,17 @@ if _RESTORE:
|
||||
|
||||
|
||||
|
||||
print("Start docker")
|
||||
logging.info("Start docker")
|
||||
# cmnd = "docker start heimdall"
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
elif app == "nginx1":
|
||||
print("Stopping docker")
|
||||
logging.info("Stopping docker")
|
||||
cmnd = "docker stop nginx-app-1"
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
|
||||
ans = "y"
|
||||
print(cmnd)
|
||||
print("Sync files")
|
||||
logging.info(cmnd)
|
||||
logging.info("Sync files")
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
@ -374,33 +410,33 @@ if _RESTORE:
|
||||
domains = ["sectorq.eu","gitlab.sectorq.eu","ha.sectorq.eu","mail.sectorq.eu","pw.sectorq.eu","semaphore.sectorq.eu","kestra.sectorq.eu","auth.sectorq.eu"]
|
||||
for d in domains:
|
||||
cmnd = f'sqlite3 /share/docker_data/nginx/data/database.sqlite "UPDATE proxy_host SET forward_host = \'{IP}\' WHERE domain_names = \'[\\"{d}\\"]\'"'
|
||||
print(cmnd)
|
||||
logging.info(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
cmnd = 'egrep -l "# bazarr.sectorq.eu|# gitea.sectorq.eu|# jf.sectorq.eu|# kestra.sectorq.eu|# auth.sectorq.eu|# ha.sectorq.eu|# pw.sectorq.eu|# semaphore.sectorq.eu|# sectorq.eu|# gitlab.sectorq.eu|# ha.sectorq.eu" /share/docker_data/nginx/data/nginx/proxy_host/*'
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
print(output.splitlines())
|
||||
logging.info(output.splitlines())
|
||||
for file in output.splitlines():
|
||||
print(file)
|
||||
logging.info(file)
|
||||
f = open(file)
|
||||
contents = f.read()
|
||||
f.close()
|
||||
regex = re.compile(r'\n\s+set \$server\s+\"\w+.\w+.\w+.\w+\";')
|
||||
contents = re.sub(regex, f'\n set $server \"{IP}\";', contents)
|
||||
#print(contents)
|
||||
print(regex)
|
||||
logging.info(regex)
|
||||
f = open(file, "w")
|
||||
contents = f.write(contents)
|
||||
f.close()
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
print("Starting docker")
|
||||
logging.info("Starting docker")
|
||||
# cmnd = "docker start nginx-app-1"
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
else:
|
||||
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
|
||||
ans = "y"
|
||||
print(cmnd)
|
||||
print("Sync files")
|
||||
logging.info(cmnd)
|
||||
logging.info("Sync files")
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
@ -412,24 +448,23 @@ if _RESTORE:
|
||||
|
||||
now = datetime.datetime.now()
|
||||
ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S")
|
||||
print("Sending finished status")
|
||||
logging.info("Sending finished status")
|
||||
msg = {"mode":_MODE,"status":"finished","bak_name":"complete","start_time":STARTTIME,"end_time":ENDJOB,"progress":0,"used_space":"?"}
|
||||
print(msg)
|
||||
logging.info(msg)
|
||||
send_mqtt_message(msg)
|
||||
|
||||
if _MODE == "auto":
|
||||
cmnd = "ssh root@amd.home.lan 'systemctl suspend &'"
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
|
||||
|
||||
if _BACKUP:
|
||||
|
||||
last = 1
|
||||
while True:
|
||||
directory = '/backups/'
|
||||
count = len(fnmatch.filter(os.listdir(directory), '*'))
|
||||
|
||||
print('File Count:', count)
|
||||
if last != count:
|
||||
logging.info(f'File Count: {count}')
|
||||
last = count
|
||||
if count == 0:
|
||||
time.sleep(10)
|
||||
continue
|
||||
@ -437,6 +472,7 @@ if _BACKUP:
|
||||
finished = []
|
||||
now = datetime.datetime.now()
|
||||
STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
|
||||
topic = "sectorq/amd/backups"
|
||||
msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":"","cur_job":"","start_time":STARTTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
|
||||
send_mqtt_message(msg)
|
||||
# iterate over files in
|
||||
@ -444,14 +480,14 @@ if _BACKUP:
|
||||
|
||||
for filename in os.scandir(directory):
|
||||
if filename.is_file():
|
||||
print(filename.path)
|
||||
print(filename.name)
|
||||
logging.info(filename.path)
|
||||
logging.info(filename.name)
|
||||
host = filename.name
|
||||
print("Backup")
|
||||
logging.info("Backup")
|
||||
for b in backups[host]["jobs"]:
|
||||
topic = "sectorq/amd/backups"
|
||||
|
||||
if not backups[host]["jobs"][b]["active"]:
|
||||
print("Backup {} is not active!".format(b))
|
||||
logging.info("Backup {} is not active!".format(b))
|
||||
msg = {"status":"inactive","bak_name":b,"start_time":"inactive","end_time":"inactive","progress":0}
|
||||
send_mqtt_message(msg)
|
||||
continue
|
||||
@ -479,44 +515,54 @@ if _BACKUP:
|
||||
client.disconnect()
|
||||
|
||||
cmnd = "mkdir -p " + NEW_BACKUP_DIR
|
||||
print(cmnd)
|
||||
logging.info(cmnd)
|
||||
if _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
print(output)
|
||||
print(status)
|
||||
print("Create backup dir")
|
||||
logging.info(output)
|
||||
logging.info(status)
|
||||
logging.info("Create backup dir")
|
||||
|
||||
cmnd = f"ssh {BACKUP_HOST} 'ls {SOURCE_DIR}'"
|
||||
logger.debug(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
logger.debug(output)
|
||||
apps = output.splitlines()
|
||||
c = len(apps)
|
||||
print(apps)
|
||||
print(len(apps))
|
||||
|
||||
step = round(100 / c,1)
|
||||
progress = 0
|
||||
for a in apps:
|
||||
logging.info(f"App {a}")
|
||||
topic = "sectorq/amd/backups"
|
||||
msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"sub":a,"start_time":STARTTIME,"end_time":"in progress","progress":round(progress),"finished":",".join(finished)}
|
||||
send_mqtt_message(msg)
|
||||
logger.debug(cmnd)
|
||||
if _FIRST:
|
||||
cmnd = f"rsync -avz --delete {SOURCE_DIR} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" rsync://{BACKUP_HOST}{BACKUP_PATH}"
|
||||
else:
|
||||
cmnd = f"rsync -avz --delete {BACKUP_DIR}{a} --link-dest {FULL_BACKUP_LATEST}/{a} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" {NEW_BACKUP_DIR}"
|
||||
|
||||
|
||||
#cmnd = "rsync -av --delete {}/ --link-dest {} --exclude=\".cache\" {}".format(SOURCE_DIR, LATEST_LINK, BACKUP_PATH)
|
||||
if _FIRST:
|
||||
cmnd = f"rsync -avz --delete {SOURCE_DIR} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" rsync://{BACKUP_HOST}{BACKUP_PATH}"
|
||||
else:
|
||||
cmnd = f"rsync -avz --delete {BACKUP_DIR} --link-dest {FULL_BACKUP_LATEST} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" {NEW_BACKUP_DIR}"
|
||||
|
||||
ans = "y"
|
||||
print(cmnd)
|
||||
print("Sync files")
|
||||
#input("??????")
|
||||
if _TEST:
|
||||
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
|
||||
# rsync --info=progress2 -avz --delete /share/docker_data/ --link-dest /m-server/docker_data/latest --exclude="gitlab/data/" --exclude="esphome/config/.esphome" --exclude="gitlab/logs/prometheus" --exclude=".cache" --exclude=".git" --exclude="var_lib_motioneye" /m-server/m-server/docker_data/newone1
|
||||
|
||||
|
||||
# input("????")
|
||||
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
#proc = subprocess.Popen(cmnd,stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.PIPE,cwd = "/myapps/",shell=True)
|
||||
|
||||
|
||||
ans = "y"
|
||||
logging.info(cmnd)
|
||||
logging.info("Sync files1")
|
||||
#input("??????")
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
#proc = subprocess.Popen(cmnd,stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.PIPE,cwd = "/myapps/",shell=True)
|
||||
progress = progress + step
|
||||
topic = "sectorq/amd/backups"
|
||||
msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"sub":a,"start_time":STARTTIME,"end_time":"in progress","progress":round(progress),"finished":",".join(finished)}
|
||||
send_mqtt_message(msg)
|
||||
|
||||
|
||||
cmnd = f"rm -rf {FULL_BACKUP_LATEST}"
|
||||
|
||||
print(cmnd)
|
||||
print("Removing latest link")
|
||||
logging.info(cmnd)
|
||||
logging.info("Removing latest link")
|
||||
# input("????")
|
||||
if _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
@ -524,7 +570,7 @@ if _BACKUP:
|
||||
cmnd = f"cd {BACKUP_ROOT}; ln -s initial latest"
|
||||
else:
|
||||
cmnd = f"cd {BACKUP_ROOT}; ln -s {DATETIME} latest"
|
||||
print("Creating new latest link")
|
||||
logging.info("Creating new latest link")
|
||||
#print(cmnd)
|
||||
# input("????")
|
||||
if _EXECUTE:
|
||||
@ -532,7 +578,7 @@ if _BACKUP:
|
||||
|
||||
|
||||
#Remove old
|
||||
print("Removing old dirs")
|
||||
logging.info("Removing old dirs")
|
||||
# input("????")
|
||||
#cmnd = "find {} -maxdepth 1 -type d -mtime +30 -exec rm -rf {{}} \;".format(BACKUP_DIR)
|
||||
cmnd = f"cd {BACKUP_ROOT} find ./ -maxdepth 1 -type d -mmin +30 -exec rm -rf {{}} \\;"
|
||||
@ -547,23 +593,56 @@ if _BACKUP:
|
||||
msg = {"mode":_MODE, "status":"finished","bak_name":"complete","host":host,"cur_job":b,"start_time":ENDTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
|
||||
send_mqtt_message(msg)
|
||||
|
||||
print("Getting size of FS")
|
||||
logging.info("Getting size of FS")
|
||||
cmnd = "df -h /mnt/raid|awk '{ print $3 }'|tail -1"
|
||||
print(cmnd)
|
||||
logging.info(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
used_space = (output.split())[0]
|
||||
now = datetime.datetime.now()
|
||||
ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S")
|
||||
print("Size : {}".format(used_space))
|
||||
print("Sending finished status")
|
||||
logging.info("Size : {}".format(used_space))
|
||||
logging.info("Sending finished status")
|
||||
#msg = {"mode":_MODE,"status":"finished","bak_name":"complete","start_time":STARTTIME,"end_time":ENDJOB,"progress":0,"used_space":used_space}
|
||||
msg = {"mode":_MODE, "status":"finished","bak_name":"complete","host":host,"cur_job":b,"start_time":STARTTIME,"end_time":ENDTIME,"progress":0,"finished":",".join(finished),"used_space":used_space}
|
||||
print(msg)
|
||||
|
||||
logging.info(msg)
|
||||
|
||||
send_mqtt_message(msg)
|
||||
|
||||
os.remove(filename.path)
|
||||
|
||||
user = "root"
|
||||
cmnd = "/myapps/venv/bin/python3 /myapps/omv_backup.py -r all"
|
||||
topic = "sectorq/amd/backups"
|
||||
for s in servers:
|
||||
if s == "m-server.home.lan":
|
||||
continue
|
||||
elif s == "nas.home.lan":
|
||||
user = "admin"
|
||||
cmnd = "/share/Data/__GITLAB/omv_backup/venv/bin/python3 /share/Data/__GITLAB/omv_backup/omv_backup.py -r all"
|
||||
msg = {"mode":_MODE, "status":"restore","bak_name":"s","host":s,"cur_job":"aaa","start_time":1,"end_time":1,"progress":0,"finished":1,"used_space":1}
|
||||
logging.info(msg)
|
||||
|
||||
send_mqtt_message(msg)
|
||||
if is_port_open(s,22):
|
||||
ssh = paramiko.SSHClient()
|
||||
ssh.load_system_host_keys()
|
||||
# Add SSH host key automatically if needed.
|
||||
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
# Connect to router using username/password authentication.
|
||||
logger.info(f"Sync {s}")
|
||||
print(f"Sync {s}")
|
||||
ssh.connect(s,
|
||||
username=user,
|
||||
look_for_keys=True,
|
||||
allow_agent=False,
|
||||
key_filename="/root/.ssh/id_rsa")
|
||||
print(cmnd)
|
||||
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(cmnd)
|
||||
for line in iter(ssh_stdout.readline, ""):
|
||||
logger.info(line)
|
||||
print(line, end="")
|
||||
for line in iter(ssh_stderr.readline, ""):
|
||||
logger.info(line)
|
||||
ssh.close()
|
||||
# if _MODE == "auto":
|
||||
# hostup = True
|
||||
# cmnd = "ssh root@omv.home.lan 'systemctl suspend &'"
|
||||
@ -594,5 +673,38 @@ if _BACKUP:
|
||||
# print(x.text)
|
||||
# except:
|
||||
# pass
|
||||
|
||||
|
||||
if _SSH_TEST:
|
||||
user = "root"
|
||||
cmnd = "/myapps/venv/bin/python3 /myapps/omv_backup.py -r all"
|
||||
topic = "sectorq/amd/backups"
|
||||
for s in servers:
|
||||
if s == "m-server.home.lan":
|
||||
continue
|
||||
elif s == "nas.home.lan":
|
||||
user = "admin"
|
||||
cmnd = "/share/Data/__GITLAB/omv_backup/venv/bin/python3 /share/Data/__GITLAB/omv_backup/omv_backup.py -r all"
|
||||
msg = {"mode":_MODE, "status":"restore","bak_name":"s","host":s,"cur_job":"aaa","start_time":1,"end_time":1,"progress":0,"finished":1,"used_space":1}
|
||||
logging.info(msg)
|
||||
|
||||
send_mqtt_message(msg)
|
||||
if is_port_open(s,22):
|
||||
ssh = paramiko.SSHClient()
|
||||
ssh.load_system_host_keys()
|
||||
# Add SSH host key automatically if needed.
|
||||
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
# Connect to router using username/password authentication.
|
||||
logger.info(f"Sync {s}")
|
||||
print(f"Sync {s}")
|
||||
ssh.connect(s,
|
||||
username=user,
|
||||
look_for_keys=True,
|
||||
allow_agent=False,
|
||||
key_filename="/root/.ssh/id_rsa")
|
||||
print(cmnd)
|
||||
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(cmnd)
|
||||
for line in iter(ssh_stdout.readline, ""):
|
||||
logger.info(line)
|
||||
print(line, end="")
|
||||
for line in iter(ssh_stderr.readline, ""):
|
||||
logger.info(line)
|
||||
ssh.close()
|
746
omv_backup_v4.py
Executable file
746
omv_backup_v4.py
Executable file
@ -0,0 +1,746 @@
|
||||
#!/myapps/venv/bin/python3
|
||||
import datetime
|
||||
import logging
|
||||
from paho.mqtt import client as mqtt_client
|
||||
import getopt
|
||||
import json
|
||||
import time
|
||||
import socket
|
||||
import subprocess
|
||||
from subprocess import Popen, PIPE, CalledProcessError
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import platform
|
||||
import requests
|
||||
import fnmatch
|
||||
import yaml
|
||||
import paramiko
|
||||
import numpy as np
|
||||
file_path = os.path.realpath(__file__)
|
||||
dir_path = os.path.dirname(file_path)
|
||||
VERSION="1.0.1"
|
||||
print(file_path)
|
||||
print(dir_path)
|
||||
os.chdir(dir_path)
|
||||
from wakeonlan import send_magic_packet
|
||||
pid = os.getpid()
|
||||
def is_port_open(host, port):
|
||||
try:
|
||||
sock = socket.create_connection((host, port))
|
||||
sock.close()
|
||||
return True
|
||||
except socket.error:
|
||||
return False
|
||||
servers = ["rpi5.home.lan","nas.home.lan","rack.home.lan","m-server.home.lan"]
|
||||
host = platform.node().lower()
|
||||
#input(host)
|
||||
cmnd = "ps -ef|grep omv_backups.py|grep -v grep |grep -v {}|wc -l".format(pid)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
if int(output) > 0:
|
||||
print("Running already!")
|
||||
sys.exit()
|
||||
def is_port_open(host, port):
|
||||
try:
|
||||
sock = socket.create_connection((host, port))
|
||||
sock.close()
|
||||
return True
|
||||
except socket.error:
|
||||
return False
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
# doesn't even have to be reachable
|
||||
conn = False
|
||||
while not conn:
|
||||
try:
|
||||
s.connect(('192.168.77.1', 1))
|
||||
IP = s.getsockname()[0]
|
||||
print(IP)
|
||||
print(output)
|
||||
conn = True
|
||||
except:
|
||||
time.sleep(5)
|
||||
|
||||
broker = 'mqtt.home.lan'
|
||||
port = 1883
|
||||
topic_sum = "sectorq/amd/backups"
|
||||
mqtt_username = 'jaydee'
|
||||
mqtt_password = 'jaydee1'
|
||||
print("1")
|
||||
try:
|
||||
opts, args = getopt.getopt(sys.argv[1:], "hTamftDr:bd:sSOl:", ["command=", "help", "output="])
|
||||
except getopt.GetoptError as err:
|
||||
#usage()
|
||||
sys.exit(2)
|
||||
output = None
|
||||
# QJ : getopts
|
||||
_MODE = "manual"
|
||||
_FIRST = _TEST = _RESTORE = _BACKUP = _SYNC = _START = _STOP = _SSH_TEST = False
|
||||
_EXECUTE = True
|
||||
_DATE = "pick"
|
||||
_LOG_LEVEL = ""
|
||||
for o, a in opts:
|
||||
if o == "-a":
|
||||
_MODE = "auto"
|
||||
elif o in ("-m", "--manual"):
|
||||
_MODE = "manual"
|
||||
elif o in ("-l", "--level"):
|
||||
_LOG_LEVEL = a.upper()
|
||||
elif o in ("-f", "--first"):
|
||||
_FIRST = True
|
||||
elif o in ("-d", "--date"):
|
||||
_DATE = a
|
||||
elif o in ("-t", "--test"):
|
||||
_TEST = True
|
||||
elif o in ("-s", "--sync"):
|
||||
_SYNC = True
|
||||
elif o in ("-S", "--start"):
|
||||
_START = True
|
||||
elif o in ("-O", "--stop"):
|
||||
_STOP = True
|
||||
elif o in ("-r", "--restore"):
|
||||
_RESTORE = True
|
||||
_APP = a
|
||||
print("RESTORE")
|
||||
elif o in ("-b", "--backup"):
|
||||
_BACKUP = True
|
||||
elif o in ("-D", "--dry"):
|
||||
_EXECUTE = False
|
||||
elif o in ("-T", "--dry"):
|
||||
_SSH_TEST = True
|
||||
elif o in ("-h", "--help"):
|
||||
print(VERSION)
|
||||
sys.exit()
|
||||
LOG_FILE = "omv_backup.log"
|
||||
if _LOG_LEVEL == "DEBUG":
|
||||
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
|
||||
logging.debug('using debug loging')
|
||||
elif _LOG_LEVEL == "ERROR":
|
||||
logging.basicConfig(filename=LOG_FILE, level=logging.ERROR, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
|
||||
logging.info('using error loging')
|
||||
elif _LOG_LEVEL == "SCAN":
|
||||
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
|
||||
logging.info('using error loging')
|
||||
else:
|
||||
logging.basicConfig(filename=LOG_FILE, level=logging.INFO, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
|
||||
logging.info("script started")
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
print("2")
|
||||
client_id = "dasdasdasd333"
|
||||
try:
|
||||
client = mqtt_client.Client(mqtt_client.CallbackAPIVersion.VERSION1, client_id)
|
||||
except:
|
||||
client = mqtt_client.Client()
|
||||
client.username_pw_set(mqtt_username, mqtt_password)
|
||||
|
||||
|
||||
backups = {
|
||||
"nas": {
|
||||
"login": "admin@nas.home.lan",
|
||||
"jobs": {
|
||||
"github":
|
||||
{"source":"/share/Data/__GITHUB",
|
||||
"exclude":"",
|
||||
"active": True
|
||||
},
|
||||
"photo": {
|
||||
"source":"/share/Photo/Years",
|
||||
"exclude":"",
|
||||
"active":True
|
||||
}
|
||||
}
|
||||
},
|
||||
"m-server":{
|
||||
"login": "root@m-server.home.lan",
|
||||
"jobs": {
|
||||
"docker_data":{
|
||||
"source":"/share/docker_data/",
|
||||
"exclude":"",
|
||||
"active":True
|
||||
},
|
||||
"fail2ban":{
|
||||
"source":"/etc/fail2ban/",
|
||||
"exclude":"",
|
||||
"active":True
|
||||
}
|
||||
}
|
||||
},
|
||||
"rpi5.home.lan":{
|
||||
"docker_data":{
|
||||
"source":"/share/docker_data/",
|
||||
"exclude":"",
|
||||
"active":True
|
||||
},
|
||||
"fail2ban":{
|
||||
"source":"/etc/fail2ban/",
|
||||
"exclude":"",
|
||||
"active":True
|
||||
}
|
||||
}
|
||||
}
|
||||
BACKUP_FS = "/media/backup/"
|
||||
BACKUP_HOST = "amd.home.lan"
|
||||
#BACKUP_HOST = "morefine.home.lan"
|
||||
|
||||
logging.info("Test connection")
|
||||
hm = socket.gethostbyaddr(BACKUP_HOST)
|
||||
|
||||
logging.info(_RESTORE)
|
||||
def send_mqtt_message(msg):
|
||||
try:
|
||||
client.connect(broker,1883,60)
|
||||
client.publish(topic, json.dumps(msg))
|
||||
client.disconnect()
|
||||
except ValueError as e:
|
||||
logging.error("Failed to send")
|
||||
print("Failed to send")
|
||||
print(e)
|
||||
|
||||
if _SYNC:
|
||||
containers = ["HomeAssistant","webhub-web-1","heimdall","pihole","mosquitto-mosquitto-1","mailu3-redis-1","mailu3-webmail-1","mailu3-resolver-1","mailu3-antispam-1","mailu3-webdav-1","mailu3-smtp-1","mailu3-oletools-1","mailu3-front-1","mailu3-fetchmail-1","mailu3-imap-1","matter-server","piper-en","openwakeword","whisper-en","auth-worker-1","auth-server-1","auth-authentik_ldap-1","auth-redis-1","auth-postgresql-1","nginx-app-1"]
|
||||
|
||||
cmnd = f"curl -H 'Authorization: Bearer l4c1j4yd33Du5lo' 192.168.77.238:8094/v1/update"
|
||||
logging.info(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
if _START:
|
||||
for c in containers:
|
||||
cmnd = f"docker start {c}"
|
||||
print(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
if _STOP:
|
||||
cmnd = "docker ps"
|
||||
status, running_containers = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
logging.info(running_containers)
|
||||
for c in running_containers.splitlines():
|
||||
print(c.split()[-1])
|
||||
if c.split()[-1] == "watchtower-watchtower-1":
|
||||
continue
|
||||
cmnd = f"docker stop {c.split()[-1]}"
|
||||
status, running_containers = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
if _RESTORE:
|
||||
logging.info("Starting Restore")
|
||||
print("Starting Restore")
|
||||
now = datetime.datetime.now()
|
||||
STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
|
||||
if _APP == "all":
|
||||
_DATE = "latest"
|
||||
if host == "rpi5.home.lan":
|
||||
_APP = ["nginx","ha","gitea","gitlab","mailu","bitwarden","esphome","grafana","ingluxdb","kestra","matter-server","mosquitto","octoprint","octoprint2","pihole","unify_block","webhub","homepage","watchtower"]
|
||||
else:
|
||||
cmnd = "ssh root@amd.home.lan 'ls /mnt/raid/backup/m-server/docker_data/latest'"
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
_APP = output.splitlines()
|
||||
logging.info(_APP)
|
||||
#input("????")
|
||||
else:
|
||||
_APP = _APP.split(",")
|
||||
|
||||
|
||||
|
||||
PROGRESS = 0
|
||||
topic = "sectorq/amd/restore"
|
||||
step = 100 / len(_APP)
|
||||
for app in _APP:
|
||||
msg = {"mode":_MODE, "status":"restore","bak_name":"Restore","host":host,"cur_job":app,"start_time":STARTTIME,"end_time":"","progress":str(round(np.ceil(PROGRESS))) + "%","finished":1,"used_space":1}
|
||||
logging.info(msg)
|
||||
|
||||
send_mqtt_message(msg)
|
||||
PROGRESS = PROGRESS + step
|
||||
now = datetime.datetime.now()
|
||||
DATETIME = now.strftime("%Y-%m-%d_%H-%M-%S")
|
||||
BACKUP_HOST = f"root@amd.home.lan"
|
||||
BACKUP_DEVICE = "/mnt/raid"
|
||||
BACKUP_DIR = f"/backup/{host}"
|
||||
|
||||
|
||||
if _DATE == "pick":
|
||||
cmnd = f"ssh root@amd.home.lan 'ls {BACKUP_DEVICE}/backup/m-server/docker_data'"
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
#print(output)
|
||||
dates = output.splitlines()
|
||||
n = 1
|
||||
for i in dates:
|
||||
logging.info(f"{n} - {i}" )
|
||||
n += 1
|
||||
|
||||
ans = input("Pick a backup to restore : ")
|
||||
_DATE = dates[int(ans) - 1]
|
||||
|
||||
|
||||
if app == "fail2ban":
|
||||
logging.info("?>?????")
|
||||
NEW_BACKUP_DIR = f"/backup/m-server/fail2ban/{_DATE}/"
|
||||
SOURCE_DIR = f"/etc/fail2ban"
|
||||
else:
|
||||
NEW_BACKUP_DIR = f"/backup/m-server/docker_data/{_DATE}/{app}"
|
||||
SOURCE_DIR = f"/share/docker_data/"
|
||||
if _FIRST:
|
||||
BACKUP_PATH="{}/initial".format(BACKUP_DIR)
|
||||
else:
|
||||
BACKUP_PATH="{}/{}".format(BACKUP_DIR, DATETIME)
|
||||
LATEST_LINK="{}/{}".format(BACKUP_DIR,_DATE)
|
||||
FULL_BACKUP_LATEST = f"{NEW_BACKUP_DIR}/{_DATE}"
|
||||
LATEST_LINK = f"/{host}/{app}/{_DATE}"
|
||||
|
||||
logging.info("Create backup dir")
|
||||
logging.info(cmnd)
|
||||
|
||||
|
||||
#cmnd = "rsync -av --delete {}/ --link-dest {} --exclude=\".cache\" {}".format(SOURCE_DIR, LATEST_LINK, BACKUP_PATH)
|
||||
|
||||
|
||||
if app == "heimdall":
|
||||
logging.info("Stopping docker")
|
||||
cmnd = "docker stop heimdall"
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
|
||||
ans = "y"
|
||||
logging.info(cmnd)
|
||||
logging.info("Sync files")
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
|
||||
entries = ["Home Assistant","Nginx Proxy Manager","Portainer","Roundcube","Authentik","Kestra"]
|
||||
for e in entries:
|
||||
cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"SELECT url FROM items WHERE title = '{e}'\""
|
||||
logging.info(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
regex = re.compile(r'[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}')
|
||||
contents = re.sub(regex, IP , output)
|
||||
cmnd = f"sqlite3 /share/docker_data/heimdall/config/www/app.sqlite \"UPDATE items SET url = '{contents}' WHERE title = '{e}'\""
|
||||
logging.info(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
# cmnd = "docker start heimdall"
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
|
||||
|
||||
|
||||
if app == "ha":
|
||||
logging.info("Stopping docker")
|
||||
cmnd = "docker stop heimdall"
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
|
||||
ans = "y"
|
||||
logging.info(cmnd)
|
||||
logging.info("Sync files")
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
logging.info("Start docker")
|
||||
# cmnd = "docker start heimdall"
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
elif app == "fail2ban":
|
||||
logging.info("Stopping docker")
|
||||
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
|
||||
ans = "y"
|
||||
logging.info(cmnd)
|
||||
logging.info("Sync files")
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
logging.info("Start docker")
|
||||
# cmnd = "docker start heimdall"
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
elif app == "homepage":
|
||||
logging.info("Stopping docker")
|
||||
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
|
||||
ans = "y"
|
||||
logging.info(cmnd)
|
||||
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
file = "/share/docker_data/homepage/config/widgets.yaml"
|
||||
with open(file, 'r') as stream:
|
||||
try:
|
||||
loaded = yaml.load(stream, Loader=yaml.FullLoader)
|
||||
except yaml.YAMLError as exc:
|
||||
logging.info(exc)
|
||||
|
||||
# Modify the fields from the dict
|
||||
#loaded['logo']['icon'] = "/images/morefine2.png"
|
||||
logging.info(json.dumps(loaded, indent=2))
|
||||
i = 0
|
||||
for y in loaded:
|
||||
logging.info(i)
|
||||
logging.info(y)
|
||||
|
||||
if "logo" in y:
|
||||
if host == "rpi5.home.lan":
|
||||
loaded[i]['logo']['icon'] = "/images/rpi5.png"
|
||||
elif host == "nas.home.lan":
|
||||
loaded[i]['logo']['icon'] = "/images/qnap_nas.png"
|
||||
else:
|
||||
loaded[i]['logo']['icon'] = "/images/morefine2.png"
|
||||
i+=1
|
||||
|
||||
# Save it again
|
||||
logging.info(f"writing to file {file}")
|
||||
with open(file, 'w') as stream:
|
||||
try:
|
||||
yaml.dump(loaded, stream, default_flow_style=False)
|
||||
except yaml.YAMLError as exc:
|
||||
print("failed")
|
||||
print(exc)
|
||||
|
||||
|
||||
|
||||
logging.info("Start docker")
|
||||
# cmnd = "docker start heimdall"
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
elif app == "nginx1":
|
||||
logging.info("Stopping docker")
|
||||
cmnd = "docker stop nginx-app-1"
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
|
||||
ans = "y"
|
||||
logging.info(cmnd)
|
||||
logging.info("Sync files")
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
domains = ["sectorq.eu","gitlab.sectorq.eu","ha.sectorq.eu","mail.sectorq.eu","pw.sectorq.eu","semaphore.sectorq.eu","kestra.sectorq.eu","auth.sectorq.eu"]
|
||||
for d in domains:
|
||||
cmnd = f'sqlite3 /share/docker_data/nginx/data/database.sqlite "UPDATE proxy_host SET forward_host = \'{IP}\' WHERE domain_names = \'[\\"{d}\\"]\'"'
|
||||
logging.info(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
cmnd = 'egrep -l "# bazarr.sectorq.eu|# gitea.sectorq.eu|# jf.sectorq.eu|# kestra.sectorq.eu|# auth.sectorq.eu|# ha.sectorq.eu|# pw.sectorq.eu|# semaphore.sectorq.eu|# sectorq.eu|# gitlab.sectorq.eu|# ha.sectorq.eu" /share/docker_data/nginx/data/nginx/proxy_host/*'
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
logging.info(output.splitlines())
|
||||
for file in output.splitlines():
|
||||
logging.info(file)
|
||||
f = open(file)
|
||||
contents = f.read()
|
||||
f.close()
|
||||
regex = re.compile(r'\n\s+set \$server\s+\"\w+.\w+.\w+.\w+\";')
|
||||
contents = re.sub(regex, f'\n set $server \"{IP}\";', contents)
|
||||
#print(contents)
|
||||
logging.info(regex)
|
||||
f = open(file, "w")
|
||||
contents = f.write(contents)
|
||||
f.close()
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
logging.info("Starting docker")
|
||||
# cmnd = "docker start nginx-app-1"
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
else:
|
||||
cmnd = f"rsync -avz --delete {BACKUP_HOST}:{BACKUP_DEVICE}{NEW_BACKUP_DIR} {SOURCE_DIR}"
|
||||
ans = "y"
|
||||
logging.info(cmnd)
|
||||
logging.info("Sync files")
|
||||
if _TEST:
|
||||
ans = input("continue?") or "n"
|
||||
if ans == "y" and _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
now = datetime.datetime.now()
|
||||
ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S")
|
||||
logging.info("Sending finished status")
|
||||
|
||||
msg = {"mode":_MODE, "status":"restore","bak_name":"Restore","host":host,"cur_job":app,"start_time":STARTTIME,"end_time":"","progress":100,"finished":ENDJOB,"used_space":1}
|
||||
logging.info(msg)
|
||||
|
||||
send_mqtt_message(msg)
|
||||
if _MODE == "auto":
|
||||
cmnd = "ssh root@amd.home.lan 'systemctl suspend &'"
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
|
||||
if _BACKUP:
|
||||
last = 1
|
||||
while True:
|
||||
directory = '/backups/'
|
||||
count = len(fnmatch.filter(os.listdir(directory), '*'))
|
||||
if last != count:
|
||||
logging.info(f'File Count: {count}')
|
||||
last = count
|
||||
if count == 0:
|
||||
time.sleep(10)
|
||||
continue
|
||||
else:
|
||||
finished = []
|
||||
now = datetime.datetime.now()
|
||||
STARTTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
|
||||
topic = "sectorq/amd/backups"
|
||||
msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":"","cur_job":"","start_time":STARTTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
|
||||
send_mqtt_message(msg)
|
||||
# iterate over files in
|
||||
# that directory
|
||||
|
||||
for filename in os.scandir(directory):
|
||||
if filename.is_file():
|
||||
logging.info(filename.path)
|
||||
logging.info(filename.name)
|
||||
host = filename.name
|
||||
logging.info("Backup")
|
||||
for b in backups[host]["jobs"]:
|
||||
|
||||
if not backups[host]["jobs"][b]["active"]:
|
||||
logging.info("Backup {} is not active!".format(b))
|
||||
msg = {"status":"inactive","bak_name":b,"start_time":"inactive","end_time":"inactive","progress":0}
|
||||
send_mqtt_message(msg)
|
||||
continue
|
||||
|
||||
SOURCE_DIR = backups[host]["jobs"][b]["source"]
|
||||
now = datetime.datetime.now()
|
||||
BACKUP_HOST = backups[host]["login"]
|
||||
BACKUP_DEVICE = "/mnt/raid"
|
||||
BACKUP_DIR = f"{BACKUP_HOST}:{SOURCE_DIR}"
|
||||
BACKUP_ROOT = f"{BACKUP_DEVICE}/backup/{host}/{b}"
|
||||
DATETIME = now.strftime("%Y-%m-%d_%H-%M-%S")
|
||||
|
||||
if _FIRST:
|
||||
NEW_BACKUP_DIR = f"{BACKUP_ROOT}/initial"
|
||||
else:
|
||||
NEW_BACKUP_DIR = f"{BACKUP_ROOT}/{DATETIME}_running"
|
||||
|
||||
FULL_BACKUP_LATEST = f"{BACKUP_ROOT}/latest"
|
||||
|
||||
# msg = {"status":"started","bak_name":b,"start_time":DATETIME,"end_time":"in progress", "progress":0}
|
||||
msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"start_time":STARTTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
|
||||
client.connect(broker,1883,60)
|
||||
client.publish(topic, json.dumps(msg),qos=0, retain=True)
|
||||
client.disconnect()
|
||||
|
||||
cmnd = "mkdir -p " + NEW_BACKUP_DIR
|
||||
logging.info(cmnd)
|
||||
|
||||
if _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
logging.info(output)
|
||||
logging.info(status)
|
||||
logging.info("Create backup dir")
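Since this part of the job runs locally on the backup host, the directory creation can also be done without a shell round-trip; a one-line equivalent of the mkdir -p above, shown only as an aside (NEW_BACKUP_DIR is the same variable used by the job):

import os

# Equivalent of `mkdir -p NEW_BACKUP_DIR`: create the tree, tolerate it already existing.
os.makedirs(NEW_BACKUP_DIR, exist_ok=True)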
|
||||
|
||||
cmnd = f"ssh {BACKUP_HOST} 'ls {SOURCE_DIR}'"
|
||||
logger.debug(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
logger.debug(output)
|
||||
apps = output.splitlines()
c = len(apps)
print(apps)
print(len(apps))

# Guard against an empty listing so the per-app progress step cannot divide by zero.
step = round(100 / c, 1) if c else 100.0
progress = 0
#cmd = f"rsync -avz --delete {BACKUP_DIR} --link-dest {FULL_BACKUP_LATEST}/ --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" {NEW_BACKUP_DIR}"
#cmd = [ 'rsync', '-avz','--info=progress2', BACKUP_DIR , NEW_BACKUP_DIR]

# The argument-list form runs without a shell, so the exclude patterns must not carry
# literal quotes; with them rsync would look for names that actually start with '"'.
cmd = ['rsync', '-avz', '--delete', BACKUP_DIR, '--link-dest', FULL_BACKUP_LATEST,
       '--exclude=jellyfin/cache/transcodes', '--exclude=.@__thumb/',
       '--exclude=gitlab/logs/prometheus', '--exclude=home-assistant.log',
       '--exclude=gitlab/logs/*', '--exclude=esphome/config/.esphome',
       '--exclude=.cache', '--exclude=.git', '--exclude=var_lib_motioneye',
       NEW_BACKUP_DIR]
logging.info(" ".join(cmd))
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
while process.poll() is None:
    # rsync lists one path per line; the first path component is the app directory.
    line = process.stdout.readline().decode("utf-8").split("/")
    print(line[0])
    if line[0] in apps:
        logging.info(f"Working on app {line[0]}")
        # Apps that rsync never mentioned are popped off the front of the list so
        # their share still counts towards the progress percentage.
        while True:
            if line[0] != apps[0]:
                del apps[0]
                progress = progress + step
            else:
                break
        apps.remove(line[0])
        #print(len(apps))
        topic = "sectorq/amd/backups"
        msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"sub":line[0],"start_time":STARTTIME,"end_time":"in progress","progress":str(round(progress)) + "%","finished":",".join(finished)}
        send_mqtt_message(msg)
        progress = progress + step
# input(apps)
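The job above is a classic hard-link incremental: --link-dest makes every file that is unchanged relative to the latest snapshot a hard link into that snapshot, so each timestamped directory reads like a full backup while only changed files consume new space. A minimal, self-contained sketch of the same idea, with placeholder paths rather than the script's real configuration:

import subprocess

def snapshot(source, backup_root, stamp):
    """One hard-linked incremental snapshot in the spirit of the job above (sketch)."""
    new_dir = f"{backup_root}/{stamp}_running"
    subprocess.run(["mkdir", "-p", new_dir], check=True)
    subprocess.run([
        "rsync", "-a", "--delete",
        f"--link-dest={backup_root}/latest",   # hard-link unchanged files from the previous run
        source, new_dir,
    ], check=True)
    return new_dir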
|
||||
|
||||
|
||||
# for a in apps:
|
||||
# logging.info(f"App {a}")
|
||||
# topic = "sectorq/amd/backups"
|
||||
# msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"sub":a,"start_time":STARTTIME,"end_time":"in progress","progress":round(progress),"finished":",".join(finished)}
|
||||
# send_mqtt_message(msg)
|
||||
# logger.debug(cmnd)
|
||||
# if _FIRST:
|
||||
# cmnd = f"rsync -avz --delete {SOURCE_DIR} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" rsync://{BACKUP_HOST}{BACKUP_PATH}"
|
||||
# else:
|
||||
# cmnd = f"rsync -avz --delete {BACKUP_DIR}{a} --link-dest {FULL_BACKUP_LATEST}/{a} --exclude=\"jellyfin/cache/transcodes\" --exclude=\"gitlab/logs/prometheus\" --exclude=\"home-assistant.log\" --exclude=\"gitlab/logs/*\" --exclude=\"esphome/config/.esphome\" --exclude=\".cache\" --exclude=\".git\" --exclude=\"var_lib_motioneye\" {NEW_BACKUP_DIR}"
|
||||
|
||||
# ans = "y"
|
||||
# logging.info(cmnd)
|
||||
# logging.info("Sync files1")
|
||||
# #input("??????")
|
||||
# if _TEST:
|
||||
# ans = input("continue?") or "n"
|
||||
# if ans == "y" and _EXECUTE:
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
# #proc = subprocess.Popen(cmnd,stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.PIPE,cwd = "/myapps/",shell=True)
|
||||
# progress = progress + step
|
||||
# topic = "sectorq/amd/backups"
|
||||
# msg = {"mode":_MODE, "status":"started","bak_name":"complete","host":host,"cur_job":b,"sub":a,"start_time":STARTTIME,"end_time":"in progress","progress":round(progress),"finished":",".join(finished)}
|
||||
# send_mqtt_message(msg)
|
||||
|
||||
|
||||
cmnd = f"rm -rf {FULL_BACKUP_LATEST}"
|
||||
|
||||
logging.info(cmnd)
|
||||
logging.info("Removing latest link")
|
||||
# input("????")
|
||||
if _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
if _FIRST:
|
||||
cmnd = f"cd {BACKUP_ROOT}; ln -s initial latest"
|
||||
else:
|
||||
cmnd = f"cd {BACKUP_ROOT}; mv {DATETIME}_running {DATETIME};ln -s {DATETIME} latest"
|
||||
logging.info("Creating new latest link")
|
||||
#print(cmnd)
|
||||
# input("????")
|
||||
if _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
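Rotating the latest pointer is just remove-and-relink; the shell one-liner above can equally be written with the standard library. A sketch that mirrors the variables used here and assumes latest is the symlink this job itself creates:

import os

def point_latest_at(backup_root, snapshot_name):
    """Re-create <backup_root>/latest as a relative symlink to the finished snapshot."""
    latest = os.path.join(backup_root, "latest")
    if os.path.islink(latest):
        os.remove(latest)                  # drop the previous pointer
    os.symlink(snapshot_name, latest)      # relative target, like `ln -s <snapshot> latest`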
|
||||
|
||||
|
||||
#Remove old
|
||||
logging.info("Removing old dirs")
# input("????")
#cmnd = "find {} -maxdepth 1 -type d -mtime +30 -exec rm -rf {{}} \;".format(BACKUP_DIR)
# Note: without the ';' after the cd, the whole find expression became arguments to cd
# and nothing was ever pruned. Also, -mmin +30 matches directories older than 30 minutes,
# whereas the commented-out variant pruned by days (-mtime +30).
cmnd = f"cd {BACKUP_ROOT}; find ./ -maxdepth 1 -type d -mmin +30 -exec rm -rf {{}} \\;"
|
||||
#print(cmnd)
|
||||
# input("????")
|
||||
if _EXECUTE:
|
||||
status, output = subprocess.getstatusoutput(cmnd)
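A pure-Python version of the retention step avoids the shell quoting pitfalls entirely; the sketch below keeps the initial and latest entries and uses a 30-day cutoff, which follows the commented-out -mtime variant rather than the active -mmin command, so treat the number as an assumption:

import os
import shutil
import time

def prune_old_snapshots(backup_root, max_age_days=30):
    """Delete timestamped snapshot directories older than the cutoff (sketch)."""
    cutoff = time.time() - max_age_days * 86400
    for entry in os.scandir(backup_root):
        if not entry.is_dir(follow_symlinks=False):
            continue
        if entry.name in ("initial", "latest"):
            continue
        if entry.stat(follow_symlinks=False).st_mtime < cutoff:
            shutil.rmtree(entry.path)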
|
||||
now = datetime.datetime.now()
|
||||
ENDTIME = now.strftime("%Y-%m-%d_%H:%M:%S")
|
||||
#msg = {"status":"finished","bak_name":b,"start_time":DATETIME,"end_time":ENDTIME,"progress":0}
|
||||
finished.append(b)
|
||||
msg = {"mode":_MODE, "status":"finished","bak_name":"complete","host":host,"cur_job":b,"start_time":ENDTIME,"end_time":"in progress","progress":0,"finished":",".join(finished)}
|
||||
send_mqtt_message(msg)
|
||||
|
||||
logging.info("Getting size of FS")
|
||||
cmnd = "df -h /mnt/raid|awk '{ print $3 }'|tail -1"
|
||||
logging.info(cmnd)
|
||||
status, output = subprocess.getstatusoutput(cmnd)
|
||||
used_space = (output.split())[0]
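Parsing df -h works, but the used-space figure is also available directly from the standard library; a small sketch for the same mount point:

import shutil

def used_space_human(path="/mnt/raid"):
    """Roughly what `df -h /mnt/raid | awk '{print $3}'` reports, e.g. '123.4G'."""
    used = shutil.disk_usage(path).used
    return f"{used / 1024**3:.1f}G"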
|
||||
now = datetime.datetime.now()
|
||||
ENDJOB = now.strftime("%Y-%m-%d_%H:%M:%S")
|
||||
logging.info("Size : {}".format(used_space))
|
||||
logging.info("Sending finished status")
|
||||
#msg = {"mode":_MODE,"status":"finished","bak_name":"complete","start_time":STARTTIME,"end_time":ENDJOB,"progress":0,"used_space":used_space}
|
||||
msg = {"mode":_MODE, "status":"finished","bak_name":"complete","host":host,"cur_job":b,"start_time":STARTTIME,"end_time":ENDTIME,"progress":0,"finished":",".join(finished),"used_space":used_space}
|
||||
logging.info(msg)
|
||||
|
||||
send_mqtt_message(msg)
|
||||
os.remove(filename.path)
|
||||
|
||||
|
||||
topic = "sectorq/amd/restore"
|
||||
for s in servers:
|
||||
#if s != "rack.home.lan":
|
||||
if s == "m-server.home.lan":
|
||||
continue
|
||||
elif s == "nas.home.lan":
|
||||
user = "admin"
|
||||
cmnd = "/share/Data/__GITLAB/omv_backup/venv/bin/python3 /share/Data/__GITLAB/omv_backup/omv_backup.py -r all"
|
||||
else:
|
||||
user = "root"
|
||||
cmnd = "sudo /myapps/omv_backup.py -r all"
|
||||
msg = {"mode":_MODE, "status":"restore","bak_name":"s","host":s,"cur_job":"aaa","start_time":1,"end_time":1,"progress":0,"finished":1,"used_space":1}
|
||||
logging.info(msg)
|
||||
|
||||
send_mqtt_message(msg)
|
||||
if is_port_open(s,22):
|
||||
ssh = paramiko.SSHClient()
|
||||
ssh.load_system_host_keys()
|
||||
# Add SSH host key automatically if needed.
|
||||
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
# Connect to router using username/password authentication.
|
||||
logger.info(f"Sync {s}")
|
||||
print(f"Sync {s}")
|
||||
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
pkey = paramiko.RSAKey.from_private_key_file("/home/jd/.ssh/id_rsa")
|
||||
ssh.connect(s,
|
||||
username=user,
|
||||
look_for_keys=False,
|
||||
allow_agent=False,
|
||||
pkey=pkey)
|
||||
print(cmnd)
|
||||
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(cmnd)
|
||||
for line in iter(ssh_stdout.readline, ""):
|
||||
logger.info(line)
|
||||
print(line, end="")
|
||||
for line in iter(ssh_stderr.readline, ""):
|
||||
logger.info(line)
|
||||
ssh.close()
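Each host is gated on is_port_open(s, 22) before the SSH attempt; that helper is defined earlier in the script and is not visible in this hunk. One plausible implementation, shown purely as a sketch:

import socket

def is_port_open(host, port, timeout=3.0):
    """True if a TCP connection to host:port succeeds within the timeout."""
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False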
|
||||
# if _MODE == "auto":
|
||||
# hostup = True
|
||||
# cmnd = "ssh root@omv.home.lan 'systemctl suspend &'"
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
# while hostup:
|
||||
# #HOST_UP = os.system(f"ping -c 1 -w 2 omv.home.lan") == 0
|
||||
# cmnd = f"ping -c 1 -w 2 {BACKUP_HOST}"
|
||||
# status, output = subprocess.getstatusoutput(cmnd)
|
||||
# # print(status)
|
||||
# # print(output)
|
||||
|
||||
|
||||
# if status == 0:
|
||||
# print(f"Backup host up, waiting - {n}\r", end="")
|
||||
# time.sleep(5)
|
||||
# n += 1
|
||||
# else:
|
||||
# print("Backup host down " )
|
||||
# hostup = False
|
||||
|
||||
|
||||
|
||||
# try:
|
||||
# url = "http://m-server.home.lan:8123/api/webhook/-0eWYFhSTzdusAO8jwQS9t1AT?mode=off"
|
||||
|
||||
# x = requests.post(url)
|
||||
|
||||
# print(x.text)
|
||||
# except:
|
||||
# pass
|
||||
if _SSH_TEST:
|
||||
user = "root"
|
||||
cmnd = "ls -la"
|
||||
topic = "sectorq/amd/backups"
|
||||
for s in servers:
|
||||
# if s == "m-server.home.lan":
|
||||
# continue
|
||||
# elif s == "nas.home.lan":
|
||||
# user = "admin"
|
||||
# cmnd = "/share/Data/__GITLAB/omv_backup/venv/bin/python3 /share/Data/__GITLAB/omv_backup/omv_backup.py -r all"
|
||||
msg = {"mode":_MODE, "status":"restore","bak_name":"s","host":s,"cur_job":"aaa","start_time":1,"end_time":1,"progress":0,"finished":1,"used_space":1}
|
||||
logging.info(msg)
|
||||
|
||||
send_mqtt_message(msg)
|
||||
if s != "rack.home.lan":
|
||||
continue
|
||||
if is_port_open(s,22):
|
||||
ssh = paramiko.SSHClient()
|
||||
ssh.load_system_host_keys()
|
||||
# Add SSH host key automatically if needed.
|
||||
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
# Connect to router using username/password authentication.
|
||||
logger.info(f"Sync {s}")
|
||||
print(f"Sync {s}")
|
||||
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
pkey = paramiko.RSAKey.from_private_key_file("/home/jd/.ssh/id_rsa")
|
||||
ssh.connect(s,
|
||||
username=user,
|
||||
look_for_keys=False,
|
||||
allow_agent=False,
|
||||
pkey=pkey)
|
||||
print(cmnd)
|
||||
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(cmnd)
|
||||
for line in iter(ssh_stdout.readline, ""):
|
||||
logger.info(line)
|
||||
print(line, end="")
|
||||
for line in iter(ssh_stderr.readline, ""):
|
||||
logger.info(line)
|
||||
ssh.close()
|
0
omv_backups copy.py
Normal file → Executable file
0
omv_backups.py.bak
Normal file → Executable file
6
requirements.txt
Normal file → Executable file
@@ -1,7 +1,9 @@
 ping3
 requests
 getmac
-paho-mqtt
+paho-mqtt<2.0.0
 autorandr
 wakeonlan
-pyyaml
+pyyaml
+paramiko
+numpy
247
venv/bin/Activate.ps1
Executable file
@ -0,0 +1,247 @@
|
||||
<#
|
||||
.Synopsis
|
||||
Activate a Python virtual environment for the current PowerShell session.
|
||||
|
||||
.Description
|
||||
Pushes the python executable for a virtual environment to the front of the
|
||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||
in a Python virtual environment. Makes use of the command line switches as
|
||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||
|
||||
.Parameter VenvDir
|
||||
Path to the directory that contains the virtual environment to activate. The
|
||||
default value for this is the parent of the directory that the Activate.ps1
|
||||
script is located within.
|
||||
|
||||
.Parameter Prompt
|
||||
The prompt prefix to display when this virtual environment is activated. By
|
||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||
|
||||
.Example
|
||||
Activate.ps1
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Verbose
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and shows extra information about the activation as it executes.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||
Activates the Python virtual environment located in the specified location.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Prompt "MyPython"
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and prefixes the current prompt with the specified string (surrounded in
|
||||
parentheses) while the virtual environment is active.
|
||||
|
||||
.Notes
|
||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||
execution policy for the user. You can do this by issuing the following PowerShell
|
||||
command:
|
||||
|
||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
|
||||
For more information on Execution Policies:
|
||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||
|
||||
#>
|
||||
Param(
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$VenvDir,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$Prompt
|
||||
)
|
||||
|
||||
<# Function declarations --------------------------------------------------- #>
|
||||
|
||||
<#
|
||||
.Synopsis
|
||||
Remove all shell session elements added by the Activate script, including the
|
||||
addition of the virtual environment's Python executable from the beginning of
|
||||
the PATH variable.
|
||||
|
||||
.Parameter NonDestructive
|
||||
If present, do not remove this function from the global namespace for the
|
||||
session.
|
||||
|
||||
#>
|
||||
function global:deactivate ([switch]$NonDestructive) {
|
||||
# Revert to original values
|
||||
|
||||
# The prior prompt:
|
||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
|
||||
# The prior PYTHONHOME:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
}
|
||||
|
||||
# The prior PATH:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||
}
|
||||
|
||||
# Just remove the VIRTUAL_ENV altogether:
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV
|
||||
}
|
||||
|
||||
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
||||
}
|
||||
|
||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||
}
|
||||
|
||||
# Leave deactivate function in the global namespace if requested:
|
||||
if (-not $NonDestructive) {
|
||||
Remove-Item -Path function:deactivate
|
||||
}
|
||||
}
|
||||
|
||||
<#
|
||||
.Description
|
||||
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
||||
given folder, and returns them in a map.
|
||||
|
||||
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
||||
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
||||
then it is considered a `key = value` line. The left hand string is the key,
|
||||
the right hand is the value.
|
||||
|
||||
If the value starts with a `'` or a `"` then the first and last character is
|
||||
stripped from the value before being captured.
|
||||
|
||||
.Parameter ConfigDir
|
||||
Path to the directory that contains the `pyvenv.cfg` file.
|
||||
#>
|
||||
function Get-PyVenvConfig(
|
||||
[String]
|
||||
$ConfigDir
|
||||
) {
|
||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||
|
||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||
|
||||
# An empty map will be returned if no config file is found.
|
||||
$pyvenvConfig = @{ }
|
||||
|
||||
if ($pyvenvConfigPath) {
|
||||
|
||||
Write-Verbose "File exists, parse `key = value` lines"
|
||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||
|
||||
$pyvenvConfigContent | ForEach-Object {
|
||||
$keyval = $PSItem -split "\s*=\s*", 2
|
||||
if ($keyval[0] -and $keyval[1]) {
|
||||
$val = $keyval[1]
|
||||
|
||||
# Remove extraneous quotations around a string value.
|
||||
if ("'""".Contains($val.Substring(0, 1))) {
|
||||
$val = $val.Substring(1, $val.Length - 2)
|
||||
}
|
||||
|
||||
$pyvenvConfig[$keyval[0]] = $val
|
||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||
}
|
||||
}
|
||||
}
|
||||
return $pyvenvConfig
|
||||
}
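The Get-PyVenvConfig description above boils down to a simple key = value parse with optional surrounding quotes; the same rule expressed in the project's own language, as a sketch that is not part of the venv scripts themselves:

def read_pyvenv_cfg(path):
    """Parse pyvenv.cfg: split each line on the first '=', strip whitespace and quotes."""
    config = {}
    with open(path) as fh:
        for raw in fh:
            key, sep, value = raw.partition("=")
            if not sep:
                continue
            key, value = key.strip(), value.strip()
            if len(value) >= 2 and value[:1] in ("'", '"') and value.endswith(value[:1]):
                value = value[1:-1]
            config[key] = value
    return config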
|
||||
|
||||
|
||||
<# Begin Activate script --------------------------------------------------- #>
|
||||
|
||||
# Determine the containing directory of this script
|
||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||
|
||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||
|
||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||
# First, get the location of the virtual environment, it might not be
|
||||
# VenvExecDir if specified on the command line.
|
||||
if ($VenvDir) {
|
||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||
Write-Verbose "VenvDir=$VenvDir"
|
||||
}
|
||||
|
||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||
# as `prompt`.
|
||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||
|
||||
# Next, set the prompt from the command line, or the config file, or
|
||||
# just use the name of the virtual environment folder.
|
||||
if ($Prompt) {
|
||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||
$Prompt = $pyvenvCfg['prompt'];
|
||||
}
|
||||
else {
|
||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||
}
|
||||
}
|
||||
|
||||
Write-Verbose "Prompt = '$Prompt'"
|
||||
Write-Verbose "VenvDir='$VenvDir'"
|
||||
|
||||
# Deactivate any currently active virtual environment, but leave the
|
||||
# deactivate function in place.
|
||||
deactivate -nondestructive
|
||||
|
||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||
# that there is an activated venv.
|
||||
$env:VIRTUAL_ENV = $VenvDir
|
||||
|
||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||
|
||||
Write-Verbose "Setting prompt to '$Prompt'"
|
||||
|
||||
# Set the prompt to include the env name
|
||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||
|
||||
function global:prompt {
|
||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||
_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
||||
}
|
||||
|
||||
# Clear PYTHONHOME
|
||||
if (Test-Path -Path Env:PYTHONHOME) {
|
||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
Remove-Item -Path Env:PYTHONHOME
|
||||
}
|
||||
|
||||
# Add the venv to the PATH
|
||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
69
venv/bin/activate
Executable file
@ -0,0 +1,69 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
|
||||
# you cannot run it directly
|
||||
|
||||
deactivate () {
|
||||
# reset old environment variables
|
||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
||||
|
||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
unset VIRTUAL_ENV_PROMPT
|
||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
VIRTUAL_ENV=/home/jd/projects/omv_backup/venv
|
||||
export VIRTUAL_ENV
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/"bin":$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||
PS1='(venv) '"${PS1:-}"
|
||||
export PS1
|
||||
VIRTUAL_ENV_PROMPT='(venv) '
|
||||
export VIRTUAL_ENV_PROMPT
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
26
venv/bin/activate.csh
Executable file
@ -0,0 +1,26 @@
|
||||
# This file must be used with "source bin/activate.csh" *from csh*.
|
||||
# You cannot run it directly.
|
||||
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
||||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||
|
||||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
setenv VIRTUAL_ENV /home/jd/projects/omv_backup/venv
|
||||
|
||||
set _OLD_VIRTUAL_PATH="$PATH"
|
||||
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
|
||||
|
||||
|
||||
set _OLD_VIRTUAL_PROMPT="$prompt"
|
||||
|
||||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
||||
set prompt = '(venv) '"$prompt"
|
||||
setenv VIRTUAL_ENV_PROMPT '(venv) '
|
||||
endif
|
||||
|
||||
alias pydoc python -m pydoc
|
||||
|
||||
rehash
|
69
venv/bin/activate.fish
Executable file
@ -0,0 +1,69 @@
|
||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
||||
# (https://fishshell.com/); you cannot run it directly.
|
||||
|
||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
# prevents error when using nested fish instances (Issue #93858)
|
||||
if functions -q _old_fish_prompt
|
||||
functions -e fish_prompt
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
end
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
set -e VIRTUAL_ENV_PROMPT
|
||||
if test "$argv[1]" != "nondestructive"
|
||||
# Self-destruct!
|
||||
functions -e deactivate
|
||||
end
|
||||
end
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV /home/jd/projects/omv_backup/venv
|
||||
|
||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
|
||||
|
||||
# Unset PYTHONHOME if set.
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# fish uses a function instead of an env var to generate the prompt.
|
||||
|
||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
# With the original prompt function renamed, we can override with our own.
|
||||
function fish_prompt
|
||||
# Save the return status of the last command.
|
||||
set -l old_status $status
|
||||
|
||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
||||
printf "%s%s%s" (set_color 4B8BBE) '(venv) ' (set_color normal)
|
||||
|
||||
# Restore the return status of the previous command.
|
||||
echo "exit $old_status" | .
|
||||
# Output the original/"old" prompt.
|
||||
_old_fish_prompt
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
set -gx VIRTUAL_ENV_PROMPT '(venv) '
|
||||
end
|
8
venv/bin/autorandr
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/jd/projects/omv_backup/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from autorandr import exception_handled_main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(exception_handled_main())
|
8
venv/bin/getmac
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/jd/projects/omv_backup/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from getmac.__main__ import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
venv/bin/normalizer
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/jd/projects/omv_backup/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from charset_normalizer import cli
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cli.cli_detect())
|
8
venv/bin/ping3
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/jd/projects/omv_backup/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from ping3.command_line import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
venv/bin/pip
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/jd/projects/omv_backup/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
venv/bin/pip3
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/jd/projects/omv_backup/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
venv/bin/pip3.11
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/jd/projects/omv_backup/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
1
venv/bin/python
Symbolic link
@ -0,0 +1 @@
|
||||
python3
|
1
venv/bin/python3
Symbolic link
@ -0,0 +1 @@
|
||||
/usr/bin/python3
|
1
venv/bin/python3.11
Symbolic link
@ -0,0 +1 @@
|
||||
python3
|
8
venv/bin/wakeonlan
Executable file
@ -0,0 +1,8 @@
|
||||
#!/home/jd/projects/omv_backup/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from wakeonlan import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
1
venv/lib/python3.11/site-packages/PyNaCl-1.5.0.dist-info/INSTALLER
Executable file
@ -0,0 +1 @@
|
||||
pip
|
174
venv/lib/python3.11/site-packages/PyNaCl-1.5.0.dist-info/LICENSE
Executable file
@ -0,0 +1,174 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
245
venv/lib/python3.11/site-packages/PyNaCl-1.5.0.dist-info/METADATA
Executable file
@ -0,0 +1,245 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: PyNaCl
|
||||
Version: 1.5.0
|
||||
Summary: Python binding to the Networking and Cryptography (NaCl) library
|
||||
Home-page: https://github.com/pyca/pynacl/
|
||||
Author: The PyNaCl developers
|
||||
Author-email: cryptography-dev@python.org
|
||||
License: Apache License 2.0
|
||||
Platform: UNKNOWN
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Requires-Python: >=3.6
|
||||
Requires-Dist: cffi (>=1.4.1)
|
||||
Provides-Extra: docs
|
||||
Requires-Dist: sphinx (>=1.6.5) ; extra == 'docs'
|
||||
Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
|
||||
Provides-Extra: tests
|
||||
Requires-Dist: pytest (!=3.3.0,>=3.2.1) ; extra == 'tests'
|
||||
Requires-Dist: hypothesis (>=3.27.0) ; extra == 'tests'
|
||||
|
||||
===============================================
|
||||
PyNaCl: Python binding to the libsodium library
|
||||
===============================================
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/pynacl.svg
|
||||
:target: https://pypi.org/project/PyNaCl/
|
||||
:alt: Latest Version
|
||||
|
||||
.. image:: https://codecov.io/github/pyca/pynacl/coverage.svg?branch=main
|
||||
:target: https://codecov.io/github/pyca/pynacl?branch=main
|
||||
|
||||
.. image:: https://img.shields.io/pypi/pyversions/pynacl.svg
|
||||
:target: https://pypi.org/project/PyNaCl/
|
||||
:alt: Compatible Python Versions
|
||||
|
||||
PyNaCl is a Python binding to `libsodium`_, which is a fork of the
|
||||
`Networking and Cryptography library`_. These libraries have a stated goal of
|
||||
improving usability, security and speed. It supports Python 3.6+ as well as
|
||||
PyPy 3.
|
||||
|
||||
.. _libsodium: https://github.com/jedisct1/libsodium
|
||||
.. _Networking and Cryptography library: https://nacl.cr.yp.to/
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
* Digital signatures
|
||||
* Secret-key encryption
|
||||
* Public-key encryption
|
||||
* Hashing and message authentication
|
||||
* Password based key derivation and password hashing
|
||||
|
||||
`Changelog`_
|
||||
------------
|
||||
|
||||
.. _Changelog: https://pynacl.readthedocs.io/en/stable/changelog/
|
||||
|
||||
Installation
|
||||
============
|
||||
|
||||
Binary wheel install
|
||||
--------------------
|
||||
|
||||
PyNaCl ships as a binary wheel on macOS, Windows and Linux ``manylinux1`` [#many]_ ,
|
||||
so all dependencies are included. Make sure you have an up-to-date pip
|
||||
and run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install pynacl
|
||||
|
||||
Faster wheel build
|
||||
------------------
|
||||
|
||||
You can define the environment variable ``LIBSODIUM_MAKE_ARGS`` to pass arguments to ``make``
|
||||
and enable `parallelization`_:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ LIBSODIUM_MAKE_ARGS=-j4 pip install pynacl
|
||||
|
||||
Linux source build
|
||||
------------------
|
||||
|
||||
PyNaCl relies on `libsodium`_, a portable C library. A copy is bundled
|
||||
with PyNaCl so to install you can run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install pynacl
|
||||
|
||||
If you'd prefer to use the version of ``libsodium`` provided by your
|
||||
distribution, you can disable the bundled copy during install by running:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ SODIUM_INSTALL=system pip install pynacl
|
||||
|
||||
.. warning:: Usage of the legacy ``easy_install`` command provided by setuptools
|
||||
is generally discouraged, and is completely unsupported in PyNaCl's case.
|
||||
|
||||
.. _parallelization: https://www.gnu.org/software/make/manual/html_node/Parallel.html
|
||||
|
||||
.. _libsodium: https://github.com/jedisct1/libsodium
|
||||
|
||||
.. [#many] `manylinux1 wheels <https://www.python.org/dev/peps/pep-0513/>`_
|
||||
are built on a baseline linux environment based on Centos 5.11
|
||||
and should work on most x86 and x86_64 glibc based linux environments.
|
||||
|
||||
Changelog
|
||||
=========
|
||||
|
||||
1.5.0 (2022-01-07)
|
||||
------------------
|
||||
|
||||
* **BACKWARDS INCOMPATIBLE:** Removed support for Python 2.7 and Python 3.5.
|
||||
* **BACKWARDS INCOMPATIBLE:** We no longer distribute ``manylinux1``
|
||||
wheels.
|
||||
* Added ``manylinux2014``, ``manylinux_2_24``, ``musllinux``, and macOS
|
||||
``universal2`` wheels (the latter supports macOS ``arm64``).
|
||||
* Update ``libsodium`` to 1.0.18-stable (July 25, 2021 release).
|
||||
* Add inline type hints.
|
||||
|
||||
1.4.0 (2020-05-25)
|
||||
------------------
|
||||
|
||||
* Update ``libsodium`` to 1.0.18.
|
||||
* **BACKWARDS INCOMPATIBLE:** We no longer distribute 32-bit ``manylinux1``
|
||||
wheels. Continuing to produce them was a maintenance burden.
|
||||
* Added support for Python 3.8, and removed support for Python 3.4.
|
||||
* Add low level bindings for extracting the seed and the public key
|
||||
from crypto_sign_ed25519 secret key
|
||||
* Add low level bindings for deterministic random generation.
|
||||
* Add ``wheel`` and ``setuptools`` setup_requirements in ``setup.py`` (#485)
|
||||
* Fix checks on very slow builders (#481, #495)
|
||||
* Add low-level bindings to ed25519 arithmetic functions
|
||||
* Update low-level blake2b state implementation
|
||||
* Fix wrong short-input behavior of SealedBox.decrypt() (#517)
|
||||
* Raise CryptPrefixError exception instead of InvalidkeyError when trying
|
||||
to check a password against a verifier stored in a unknown format (#519)
|
||||
* Add support for minimal builds of libsodium. Trying to call functions
|
||||
not available in a minimal build will raise an UnavailableError
|
||||
exception. To compile a minimal build of the bundled libsodium, set
|
||||
the SODIUM_INSTALL_MINIMAL environment variable to any non-empty
|
||||
string (e.g. ``SODIUM_INSTALL_MINIMAL=1``) for setup.
|
||||
|
||||
1.3.0 2018-09-26
|
||||
----------------
|
||||
|
||||
* Added support for Python 3.7.
|
||||
* Update ``libsodium`` to 1.0.16.
|
||||
* Run and test all code examples in PyNaCl docs through sphinx's
|
||||
doctest builder.
|
||||
* Add low-level bindings for chacha20-poly1305 AEAD constructions.
|
||||
* Add low-level bindings for the chacha20-poly1305 secretstream constructions.
|
||||
* Add low-level bindings for ed25519ph pre-hashed signing construction.
|
||||
* Add low-level bindings for constant-time increment and addition
|
||||
on fixed-precision big integers represented as little-endian
|
||||
byte sequences.
|
||||
* Add low-level bindings for the ISO/IEC 7816-4 compatible padding API.
|
||||
* Add low-level bindings for libsodium's crypto_kx... key exchange
|
||||
construction.
|
||||
* Set hypothesis deadline to None in tests/test_pwhash.py to avoid
|
||||
incorrect test failures on slower processor architectures. GitHub
|
||||
issue #370
|
||||
|
||||
1.2.1 - 2017-12-04
|
||||
------------------
|
||||
|
||||
* Update hypothesis minimum allowed version.
|
||||
* Infrastructure: add proper configuration for readthedocs builder
|
||||
runtime environment.
|
||||
|
||||
1.2.0 - 2017-11-01
|
||||
------------------
|
||||
|
||||
* Update ``libsodium`` to 1.0.15.
|
||||
* Infrastructure: add jenkins support for automatic build of
|
||||
``manylinux1`` binary wheels
|
||||
* Added support for ``SealedBox`` construction.
|
||||
* Added support for ``argon2i`` and ``argon2id`` password hashing constructs
|
||||
and restructured high-level password hashing implementation to expose
|
||||
the same interface for all hashers.
|
||||
* Added support for 128 bit ``siphashx24`` variant of ``siphash24``.
|
||||
* Added support for ``from_seed`` APIs for X25519 keypair generation.
|
||||
* Dropped support for Python 3.3.
|
||||
|
||||
1.1.2 - 2017-03-31
|
||||
------------------
|
||||
|
||||
* reorder link time library search path when using bundled
|
||||
libsodium
|
||||
|
||||
1.1.1 - 2017-03-15
|
||||
------------------
|
||||
|
||||
* Fixed a circular import bug in ``nacl.utils``.
|
||||
|
||||
1.1.0 - 2017-03-14
|
||||
------------------
|
||||
|
||||
* Dropped support for Python 2.6.
|
||||
* Added ``shared_key()`` method on ``Box``.
|
||||
* You can now pass ``None`` to ``nonce`` when encrypting with ``Box`` or
|
||||
``SecretBox`` and it will automatically generate a random nonce.
|
||||
* Added support for ``siphash24``.
|
||||
* Added support for ``blake2b``.
|
||||
* Added support for ``scrypt``.
|
||||
* Update ``libsodium`` to 1.0.11.
|
||||
* Default to the bundled ``libsodium`` when compiling.
|
||||
* All raised exceptions are defined mixing-in
|
||||
``nacl.exceptions.CryptoError``
|
||||
|
||||
1.0.1 - 2016-01-24
|
||||
------------------
|
||||
|
||||
* Fix an issue with absolute paths that prevented the creation of wheels.
|
||||
|
||||
1.0 - 2016-01-23
|
||||
----------------
|
||||
|
||||
* PyNaCl has been ported to use the new APIs available in cffi 1.0+.
|
||||
Due to this change we no longer support PyPy releases older than 2.6.
|
||||
* Python 3.2 support has been dropped.
|
||||
* Functions to convert between Ed25519 and Curve25519 keys have been added.
|
||||
|
||||
0.3.0 - 2015-03-04
|
||||
------------------
|
||||
|
||||
* The low-level API (`nacl.c.*`) has been changed to match the
|
||||
upstream NaCl C/C++ conventions (as well as those of other NaCl bindings).
|
||||
The order of arguments and return values has changed significantly. To
|
||||
avoid silent failures, `nacl.c` has been removed, and replaced with
|
||||
`nacl.bindings` (with the new argument ordering). If you have code which
|
||||
calls these functions (e.g. `nacl.c.crypto_box_keypair()`), you must review
|
||||
the new docstrings and update your code/imports to match the new
|
||||
conventions.
|
||||
|
||||
|
68
venv/lib/python3.11/site-packages/PyNaCl-1.5.0.dist-info/RECORD
Executable file
@ -0,0 +1,68 @@
|
||||
PyNaCl-1.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
PyNaCl-1.5.0.dist-info/LICENSE,sha256=0xdK1j5yHUydzLitQyCEiZLTFDabxGMZcgtYAskVP-k,9694
|
||||
PyNaCl-1.5.0.dist-info/METADATA,sha256=OJaXCiHgNRywLY9cj3X2euddUPZ4dnyyqAQMU01X4j0,8634
|
||||
PyNaCl-1.5.0.dist-info/RECORD,,
|
||||
PyNaCl-1.5.0.dist-info/WHEEL,sha256=TIQeZFe3DwXBO5UGlCH1aKpf5Cx6FJLbIUqd-Sq2juI,185
|
||||
PyNaCl-1.5.0.dist-info/top_level.txt,sha256=wfdEOI_G2RIzmzsMyhpqP17HUh6Jcqi99to9aHLEslo,13
|
||||
nacl/__init__.py,sha256=0IUunzBT8_Jn0DUdHacBExOYeAEMggo8slkfjo7O0XM,1116
|
||||
nacl/__pycache__/__init__.cpython-311.pyc,,
|
||||
nacl/__pycache__/encoding.cpython-311.pyc,,
|
||||
nacl/__pycache__/exceptions.cpython-311.pyc,,
|
||||
nacl/__pycache__/hash.cpython-311.pyc,,
|
||||
nacl/__pycache__/hashlib.cpython-311.pyc,,
|
||||
nacl/__pycache__/public.cpython-311.pyc,,
|
||||
nacl/__pycache__/secret.cpython-311.pyc,,
|
||||
nacl/__pycache__/signing.cpython-311.pyc,,
|
||||
nacl/__pycache__/utils.cpython-311.pyc,,
|
||||
nacl/_sodium.abi3.so,sha256=uJ6RwSnbb9wO4esR0bVUqrfFHtBOGm34IQIdmaE1fGY,2740136
|
||||
nacl/bindings/__init__.py,sha256=BDlStrds2EuUS4swOL4pnf92PWVS_CHRCptX3KhEX-s,16997
|
||||
nacl/bindings/__pycache__/__init__.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/crypto_aead.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/crypto_box.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/crypto_core.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/crypto_generichash.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/crypto_hash.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/crypto_kx.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/crypto_pwhash.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/crypto_scalarmult.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/crypto_secretbox.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/crypto_secretstream.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/crypto_shorthash.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/crypto_sign.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/randombytes.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/sodium_core.cpython-311.pyc,,
|
||||
nacl/bindings/__pycache__/utils.cpython-311.pyc,,
|
||||
nacl/bindings/crypto_aead.py,sha256=BIw1k_JCfr5ylZk0RF5rCFIM1fhfLkEa-aiWkrfffNE,15597
|
||||
nacl/bindings/crypto_box.py,sha256=Ox0NG2t4MsGhBAa7Kgah4o0gc99ULMsqkdX56ofOouY,10139
|
||||
nacl/bindings/crypto_core.py,sha256=6u9G3y7H-QrawO785UkFFFtwDoCkeHE63GOUl9p5-eA,13736
|
||||
nacl/bindings/crypto_generichash.py,sha256=9mX0DGIIzicr-uXrqFM1nU4tirasbixDwbcdfV7W1fc,8852
|
||||
nacl/bindings/crypto_hash.py,sha256=Rg1rsEwE3azhsQT-dNVPA4NB9VogJAKn1EfxYt0pPe0,2175
|
||||
nacl/bindings/crypto_kx.py,sha256=oZNVlNgROpHOa1XQ_uZe0tqIkdfuApeJlRnwR23_74k,6723
|
||||
nacl/bindings/crypto_pwhash.py,sha256=laVDo4xFUuGyEjtZAU510AklBF6ablBy7Z3HN1WDYjY,18848
|
||||
nacl/bindings/crypto_scalarmult.py,sha256=_DX-mst2uCnzjo6fP5HRTnhv1BC95B9gmJc3L_or16g,8244
|
||||
nacl/bindings/crypto_secretbox.py,sha256=KgZ1VvkCJDlQ85jtfe9c02VofPvuEgZEhWni-aX3MsM,2914
|
||||
nacl/bindings/crypto_secretstream.py,sha256=G0FgZS01qA5RzWzm5Bdms8Yy_lvgdZDoUYYBActPmvQ,11165
|
||||
nacl/bindings/crypto_shorthash.py,sha256=PQU7djHTLDGdVs-w_TsivjFHHp5EK5k2Yh6p-6z0T60,2603
|
||||
nacl/bindings/crypto_sign.py,sha256=53j2im9E4F79qT_2U8IfCAc3lzg0VMwEjvAPEUccVDg,10342
|
||||
nacl/bindings/randombytes.py,sha256=uBK3W4WcjgnjZdWanrX0fjYZpr9KHbBgNMl9rui-Ojc,1563
|
||||
nacl/bindings/sodium_core.py,sha256=9Y9CX--sq-TaPaQRPRpx8SWDSS9PJOja_Cqb-yqyJNQ,1039
|
||||
nacl/bindings/utils.py,sha256=KDwQnadXeNMbqEA1SmpNyCVo5k8MiUQa07QM66VzfXM,4298
|
||||
nacl/encoding.py,sha256=qTAPc2MXSkdh4cqDVY0ra6kHyViHMCmEo_re7cgGk5w,2915
|
||||
nacl/exceptions.py,sha256=GZH32aJtZgqCO4uz0LRsev8z0WyvAYuV3YVqT9AAQq4,2451
|
||||
nacl/hash.py,sha256=EYBOe6UVc9SUQINEmyuRSa1QGRSvdwdrBzTL1tdFLU8,6392
|
||||
nacl/hashlib.py,sha256=L5Fv75St8AMPvb-GhA4YqX5p1mC_Sb4HhC1NxNQMpJA,4400
|
||||
nacl/public.py,sha256=RVGCWQRjIJOmW-8sNrVLtsDjMMGx30i6UyfViGCnQNA,14792
|
||||
nacl/pwhash/__init__.py,sha256=XSDXd7wQHNLEHl0mkHfVb5lFQsp6ygHkhen718h0BSM,2675
|
||||
nacl/pwhash/__pycache__/__init__.cpython-311.pyc,,
|
||||
nacl/pwhash/__pycache__/_argon2.cpython-311.pyc,,
|
||||
nacl/pwhash/__pycache__/argon2i.cpython-311.pyc,,
|
||||
nacl/pwhash/__pycache__/argon2id.cpython-311.pyc,,
|
||||
nacl/pwhash/__pycache__/scrypt.cpython-311.pyc,,
|
||||
nacl/pwhash/_argon2.py,sha256=jL1ChR9biwYh3RSuc-LJ2-W4DlVLHpir-XHGX8cpeJQ,1779
|
||||
nacl/pwhash/argon2i.py,sha256=IIvIuO9siKUu5-Wpz0SGiltLQv7Du_mi9BUE8INRK_4,4405
|
||||
nacl/pwhash/argon2id.py,sha256=H22i8O4j9Ws4L3JsXl9TRcJzDcyaVumhQRPzINAgJWM,4433
|
||||
nacl/pwhash/scrypt.py,sha256=fMr3Qht1a1EY8aebNNntfLRjinIPXtKYKKrrBhY5LDc,6986
|
||||
nacl/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
nacl/secret.py,sha256=kauBNuP-0rb3TjU2EMBMu5Vnmzjnscp1bRqMspy5LzU,12108
|
||||
nacl/signing.py,sha256=kbTEUyHLUMaNLv1nCjxzGxCs82Qs5w8gxE_CnEwPuIU,8337
|
||||
nacl/utils.py,sha256=gmlTD1x9ZNwzHd8LpALH1CHud-Htv8ejRb3y7TyS9f0,2341
|
7
venv/lib/python3.11/site-packages/PyNaCl-1.5.0.dist-info/WHEEL
Executable file
@ -0,0 +1,7 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.37.1)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp36-abi3-manylinux_2_17_x86_64
|
||||
Tag: cp36-abi3-manylinux2014_x86_64
|
||||
Tag: cp36-abi3-manylinux_2_24_x86_64
|
||||
|
2
venv/lib/python3.11/site-packages/PyNaCl-1.5.0.dist-info/top_level.txt
Executable file
@@ -0,0 +1,2 @@
_sodium
nacl
1
venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/INSTALLER
Executable file
@@ -0,0 +1 @@
pip
20
venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/LICENSE
Executable file
@@ -0,0 +1,20 @@
Copyright (c) 2017-2021 Ingy döt Net
Copyright (c) 2006-2016 Kirill Simonov

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
46
venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/METADATA
Executable file
@@ -0,0 +1,46 @@
Metadata-Version: 2.1
Name: PyYAML
Version: 6.0.2
Summary: YAML parser and emitter for Python
Home-page: https://pyyaml.org/
Download-URL: https://pypi.org/project/PyYAML/
Author: Kirill Simonov
Author-email: xi@resolvent.net
License: MIT
Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues
Project-URL: CI, https://github.com/yaml/pyyaml/actions
Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation
Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core
Project-URL: Source Code, https://github.com/yaml/pyyaml
Platform: Any
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Cython
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Text Processing :: Markup
Requires-Python: >=3.8
License-File: LICENSE

YAML is a data serialization format designed for human readability
and interaction with scripting languages. PyYAML is a YAML parser
and emitter for Python.

PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
support, a capable extension API, and sensible error messages. PyYAML
supports standard YAML tags and provides Python-specific tags that
allow representing arbitrary Python objects.

PyYAML is applicable for a broad range of tasks from complex
configuration files to object serialization and persistence.
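As a quick illustration of the parser and emitter described in this metadata, here is a minimal sketch (assuming only that PyYAML 6.x is importable as `yaml`; the data is made up for the example):

```python
# Minimal PyYAML round trip: parse a YAML document, read a value, emit it again.
import yaml

doc = """
backup:
  host: example.lan      # hypothetical values, for illustration only
  retries: 3
"""

cfg = yaml.safe_load(doc)          # -> {'backup': {'host': 'example.lan', 'retries': 3}}
print(cfg["backup"]["retries"])    # 3
print(yaml.safe_dump(cfg, sort_keys=False), end="")
```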
44
venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/RECORD
Executable file
@@ -0,0 +1,44 @@
PyYAML-6.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PyYAML-6.0.2.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101
PyYAML-6.0.2.dist-info/METADATA,sha256=9-odFB5seu4pGPcEv7E8iyxNF51_uKnaNGjLAhz2lto,2060
PyYAML-6.0.2.dist-info/RECORD,,
PyYAML-6.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
PyYAML-6.0.2.dist-info/WHEEL,sha256=YWWHkv6sHhBDPNqgSfLklIm4KZnZJH4x2lIHOwCoU7Q,152
PyYAML-6.0.2.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11
_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402
_yaml/__pycache__/__init__.cpython-311.pyc,,
yaml/__init__.py,sha256=N35S01HMesFTe0aRRMWkPj0Pa8IEbHpE9FK7cr5Bdtw,12311
yaml/__pycache__/__init__.cpython-311.pyc,,
yaml/__pycache__/composer.cpython-311.pyc,,
yaml/__pycache__/constructor.cpython-311.pyc,,
yaml/__pycache__/cyaml.cpython-311.pyc,,
yaml/__pycache__/dumper.cpython-311.pyc,,
yaml/__pycache__/emitter.cpython-311.pyc,,
yaml/__pycache__/error.cpython-311.pyc,,
yaml/__pycache__/events.cpython-311.pyc,,
yaml/__pycache__/loader.cpython-311.pyc,,
yaml/__pycache__/nodes.cpython-311.pyc,,
yaml/__pycache__/parser.cpython-311.pyc,,
yaml/__pycache__/reader.cpython-311.pyc,,
yaml/__pycache__/representer.cpython-311.pyc,,
yaml/__pycache__/resolver.cpython-311.pyc,,
yaml/__pycache__/scanner.cpython-311.pyc,,
yaml/__pycache__/serializer.cpython-311.pyc,,
yaml/__pycache__/tokens.cpython-311.pyc,,
yaml/_yaml.cpython-311-x86_64-linux-gnu.so,sha256=sZBsAqPs6VM8YzOkHpNL0qKIfR0zNM9gttjzjoVPaiI,2466120
yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883
yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639
yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851
yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837
yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006
yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533
yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445
yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061
yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440
yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495
yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794
yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190
yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004
yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279
yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165
yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573
0
venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/REQUESTED
Executable file
6
venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/WHEEL
Executable file
@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.44.0)
Root-Is-Purelib: false
Tag: cp311-cp311-manylinux_2_17_x86_64
Tag: cp311-cp311-manylinux2014_x86_64

2
venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/top_level.txt
Executable file
@@ -0,0 +1,2 @@
_yaml
yaml
BIN
venv/lib/python3.11/site-packages/__pycache__/autorandr.cpython-311.pyc
Executable file
Binary file not shown.
BIN
venv/lib/python3.11/site-packages/_cffi_backend.cpython-311-x86_64-linux-gnu.so
Executable file
Binary file not shown.
222
venv/lib/python3.11/site-packages/_distutils_hack/__init__.py
Executable file
@@ -0,0 +1,222 @@
|
||||
# don't import any costly modules
|
||||
import sys
|
||||
import os
|
||||
|
||||
|
||||
is_pypy = '__pypy__' in sys.builtin_module_names
|
||||
|
||||
|
||||
def warn_distutils_present():
|
||||
if 'distutils' not in sys.modules:
|
||||
return
|
||||
if is_pypy and sys.version_info < (3, 7):
|
||||
# PyPy for 3.6 unconditionally imports distutils, so bypass the warning
|
||||
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
|
||||
return
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"Distutils was imported before Setuptools, but importing Setuptools "
|
||||
"also replaces the `distutils` module in `sys.modules`. This may lead "
|
||||
"to undesirable behaviors or errors. To avoid these issues, avoid "
|
||||
"using distutils directly, ensure that setuptools is installed in the "
|
||||
"traditional way (e.g. not an editable install), and/or make sure "
|
||||
"that setuptools is always imported before distutils."
|
||||
)
|
||||
|
||||
|
||||
def clear_distutils():
|
||||
if 'distutils' not in sys.modules:
|
||||
return
|
||||
import warnings
|
||||
|
||||
warnings.warn("Setuptools is replacing distutils.")
|
||||
mods = [
|
||||
name
|
||||
for name in sys.modules
|
||||
if name == "distutils" or name.startswith("distutils.")
|
||||
]
|
||||
for name in mods:
|
||||
del sys.modules[name]
|
||||
|
||||
|
||||
def enabled():
|
||||
"""
|
||||
Allow selection of distutils by environment variable.
|
||||
"""
|
||||
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
|
||||
return which == 'local'
|
||||
|
||||
|
||||
def ensure_local_distutils():
|
||||
import importlib
|
||||
|
||||
clear_distutils()
|
||||
|
||||
# With the DistutilsMetaFinder in place,
|
||||
# perform an import to cause distutils to be
|
||||
# loaded from setuptools._distutils. Ref #2906.
|
||||
with shim():
|
||||
importlib.import_module('distutils')
|
||||
|
||||
# check that submodules load as expected
|
||||
core = importlib.import_module('distutils.core')
|
||||
assert '_distutils' in core.__file__, core.__file__
|
||||
assert 'setuptools._distutils.log' not in sys.modules
|
||||
|
||||
|
||||
def do_override():
|
||||
"""
|
||||
Ensure that the local copy of distutils is preferred over stdlib.
|
||||
|
||||
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
|
||||
for more motivation.
|
||||
"""
|
||||
if enabled():
|
||||
warn_distutils_present()
|
||||
ensure_local_distutils()
|
||||
|
||||
|
||||
class _TrivialRe:
|
||||
def __init__(self, *patterns):
|
||||
self._patterns = patterns
|
||||
|
||||
def match(self, string):
|
||||
return all(pat in string for pat in self._patterns)
|
||||
|
||||
|
||||
class DistutilsMetaFinder:
|
||||
def find_spec(self, fullname, path, target=None):
|
||||
# optimization: only consider top level modules and those
|
||||
# found in the CPython test suite.
|
||||
if path is not None and not fullname.startswith('test.'):
|
||||
return
|
||||
|
||||
method_name = 'spec_for_{fullname}'.format(**locals())
|
||||
method = getattr(self, method_name, lambda: None)
|
||||
return method()
|
||||
|
||||
def spec_for_distutils(self):
|
||||
if self.is_cpython():
|
||||
return
|
||||
|
||||
import importlib
|
||||
import importlib.abc
|
||||
import importlib.util
|
||||
|
||||
try:
|
||||
mod = importlib.import_module('setuptools._distutils')
|
||||
except Exception:
|
||||
# There are a couple of cases where setuptools._distutils
|
||||
# may not be present:
|
||||
# - An older Setuptools without a local distutils is
|
||||
# taking precedence. Ref #2957.
|
||||
# - Path manipulation during sitecustomize removes
|
||||
# setuptools from the path but only after the hook
|
||||
# has been loaded. Ref #2980.
|
||||
# In either case, fall back to stdlib behavior.
|
||||
return
|
||||
|
||||
class DistutilsLoader(importlib.abc.Loader):
|
||||
def create_module(self, spec):
|
||||
mod.__name__ = 'distutils'
|
||||
return mod
|
||||
|
||||
def exec_module(self, module):
|
||||
pass
|
||||
|
||||
return importlib.util.spec_from_loader(
|
||||
'distutils', DistutilsLoader(), origin=mod.__file__
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def is_cpython():
|
||||
"""
|
||||
Suppress supplying distutils for CPython (build and tests).
|
||||
Ref #2965 and #3007.
|
||||
"""
|
||||
return os.path.isfile('pybuilddir.txt')
|
||||
|
||||
def spec_for_pip(self):
|
||||
"""
|
||||
Ensure stdlib distutils when running under pip.
|
||||
See pypa/pip#8761 for rationale.
|
||||
"""
|
||||
if self.pip_imported_during_build():
|
||||
return
|
||||
clear_distutils()
|
||||
self.spec_for_distutils = lambda: None
|
||||
|
||||
@classmethod
|
||||
def pip_imported_during_build(cls):
|
||||
"""
|
||||
Detect if pip is being imported in a build script. Ref #2355.
|
||||
"""
|
||||
import traceback
|
||||
|
||||
return any(
|
||||
cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def frame_file_is_setup(frame):
|
||||
"""
|
||||
Return True if the indicated frame suggests a setup.py file.
|
||||
"""
|
||||
# some frames may not have __file__ (#2940)
|
||||
return frame.f_globals.get('__file__', '').endswith('setup.py')
|
||||
|
||||
def spec_for_sensitive_tests(self):
|
||||
"""
|
||||
Ensure stdlib distutils when running select tests under CPython.
|
||||
|
||||
python/cpython#91169
|
||||
"""
|
||||
clear_distutils()
|
||||
self.spec_for_distutils = lambda: None
|
||||
|
||||
sensitive_tests = (
|
||||
[
|
||||
'test.test_distutils',
|
||||
'test.test_peg_generator',
|
||||
'test.test_importlib',
|
||||
]
|
||||
if sys.version_info < (3, 10)
|
||||
else [
|
||||
'test.test_distutils',
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
for name in DistutilsMetaFinder.sensitive_tests:
|
||||
setattr(
|
||||
DistutilsMetaFinder,
|
||||
f'spec_for_{name}',
|
||||
DistutilsMetaFinder.spec_for_sensitive_tests,
|
||||
)
|
||||
|
||||
|
||||
DISTUTILS_FINDER = DistutilsMetaFinder()
|
||||
|
||||
|
||||
def add_shim():
|
||||
DISTUTILS_FINDER in sys.meta_path or insert_shim()
|
||||
|
||||
|
||||
class shim:
|
||||
def __enter__(self):
|
||||
insert_shim()
|
||||
|
||||
def __exit__(self, exc, value, tb):
|
||||
remove_shim()
|
||||
|
||||
|
||||
def insert_shim():
|
||||
sys.meta_path.insert(0, DISTUTILS_FINDER)
|
||||
|
||||
|
||||
def remove_shim():
|
||||
try:
|
||||
sys.meta_path.remove(DISTUTILS_FINDER)
|
||||
except ValueError:
|
||||
pass
|
Binary file not shown.
Binary file not shown.
1
venv/lib/python3.11/site-packages/_distutils_hack/override.py
Executable file
@@ -0,0 +1 @@
__import__('_distutils_hack').do_override()
33
venv/lib/python3.11/site-packages/_yaml/__init__.py
Executable file
@@ -0,0 +1,33 @@
|
||||
# This is a stub package designed to roughly emulate the _yaml
|
||||
# extension module, which previously existed as a standalone module
|
||||
# and has been moved into the `yaml` package namespace.
|
||||
# It does not perfectly mimic its old counterpart, but should get
|
||||
# close enough for anyone who's relying on it even when they shouldn't.
|
||||
import yaml
|
||||
|
||||
# in some circumstances, the yaml module we imported may be from a different version, so we need
|
||||
# to tread carefully when poking at it here (it may not have the attributes we expect)
|
||||
if not getattr(yaml, '__with_libyaml__', False):
|
||||
from sys import version_info
|
||||
|
||||
exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
|
||||
raise exc("No module named '_yaml'")
|
||||
else:
|
||||
from yaml._yaml import *
|
||||
import warnings
|
||||
warnings.warn(
|
||||
'The _yaml extension module is now located at yaml._yaml'
|
||||
' and its location is subject to change. To use the'
|
||||
' LibYAML-based parser and emitter, import from `yaml`:'
|
||||
' `from yaml import CLoader as Loader, CDumper as Dumper`.',
|
||||
DeprecationWarning
|
||||
)
|
||||
del warnings
|
||||
# Don't `del yaml` here because yaml is actually an existing
|
||||
# namespace member of _yaml.
|
||||
|
||||
__name__ = '_yaml'
|
||||
# If the module is top-level (i.e. not a part of any specific package)
|
||||
# then the attribute should be set to ''.
|
||||
# https://docs.python.org/3.8/library/types.html
|
||||
__package__ = ''
|
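The deprecation warning above already names the supported import path; a minimal sketch of that recommended usage (assuming PyYAML was built with libyaml, i.e. `yaml.__with_libyaml__` is true):

```python
# Use the LibYAML-based parser/emitter via the supported import path,
# as suggested by the warning emitted by this stub module.
import yaml
from yaml import CLoader as Loader, CDumper as Dumper  # requires libyaml support

data = yaml.load("key: value\n", Loader=Loader)
print(yaml.dump(data, Dumper=Dumper), end="")
```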
BIN
venv/lib/python3.11/site-packages/_yaml/__pycache__/__init__.cpython-311.pyc
Executable file
Binary file not shown.
@@ -0,0 +1 @@
pip
422
venv/lib/python3.11/site-packages/autorandr-1.15.post1.dist-info/METADATA
Executable file
@@ -0,0 +1,422 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: autorandr
|
||||
Version: 1.15.post1
|
||||
Summary: Automatically select a display configuration based on connected devices
|
||||
Home-page: https://github.com/phillipberndt/autorandr
|
||||
Author: Phillip Berndt
|
||||
Author-email: phillip.berndt@googlemail.com
|
||||
License: GPLv3
|
||||
Keywords: xrandr
|
||||
Platform: UNKNOWN
|
||||
Classifier: Environment :: Console
|
||||
Classifier: Intended Audience :: End Users/Desktop
|
||||
Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
|
||||
Classifier: Programming Language :: Python :: 2
|
||||
Classifier: Programming Language :: Python :: 2.7
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.3
|
||||
Classifier: Programming Language :: Python :: 3.4
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Description-Content-Type: text/markdown
|
||||
|
||||
# autorandr
|
||||
|
||||
Automatically select a display configuration based on connected devices
|
||||
|
||||
## Branch information
|
||||
|
||||
This is a compatible Python rewrite of
|
||||
[wertarbyte/autorandr](https://github.com/wertarbyte/autorandr). Contributions
|
||||
for bash-completion, fd.o/XDG autostart, Nitrogen, pm-utils, and systemd can be
|
||||
found under [contrib](contrib/).
|
||||
|
||||
The original [wertarbyte/autorandr](https://github.com/wertarbyte/autorandr)
|
||||
tree is unmaintained, with lots of open pull requests and issues. I forked it
|
||||
and merged what I thought were the most important changes. If you are searching
|
||||
for that version, see the [`legacy` branch](https://github.com/phillipberndt/autorandr/tree/legacy).
|
||||
Note that the Python version is better suited for non-standard configurations,
|
||||
like if you use `--transform` or `--reflect`. If you use `auto-disper`, you
|
||||
have to use the bash version, as there is no disper support in the Python
|
||||
version (yet). Both versions use a compatible configuration file format, so
|
||||
you can, to some extent, switch between them. I will maintain the `legacy`
|
||||
branch until @wertarbyte finds the time to maintain his branch again.
|
||||
|
||||
If you are interested in why there are two versions around, see
|
||||
[#7](https://github.com/phillipberndt/autorandr/issues/7),
|
||||
[#8](https://github.com/phillipberndt/autorandr/issues/8) and
|
||||
especially
|
||||
[#12](https://github.com/phillipberndt/autorandr/issues/12)
|
||||
if you are unhappy with this version and would like to contribute to the bash
|
||||
version.
|
||||
|
||||
## License information and authors
|
||||
|
||||
autorandr is available under the terms of the GNU General Public License
|
||||
(version 3).
|
||||
|
||||
Contributors to this version of autorandr are:
|
||||
|
||||
* Adrián López
|
||||
* andersonjacob
|
||||
* Alexander Lochmann
|
||||
* Alexander Wirt
|
||||
* Brice Waegeneire
|
||||
* Chris Dunder
|
||||
* Christoph Gysin
|
||||
* Christophe-Marie Duquesne
|
||||
* Daniel Hahler
|
||||
* Maciej Sitarz
|
||||
* Mathias Svensson
|
||||
* Matthew R Johnson
|
||||
* Nazar Mokrynskyi
|
||||
* Phillip Berndt
|
||||
* Rasmus Wriedt Larsen
|
||||
* Sam Coulter
|
||||
* Simon Wydooghe
|
||||
* Stefan Tomanek
|
||||
* stormc
|
||||
* tachylatus
|
||||
* Timo Bingmann
|
||||
* Timo Kaufmann
|
||||
* Tomasz Bogdal
|
||||
* Victor Häggqvist
|
||||
* Jan-Oliver Kaiser
|
||||
* Alexandre Viau
|
||||
|
||||
## Installation/removal
|
||||
|
||||
You can use the `autorandr.py` script as a stand-alone binary. If you'd like to
|
||||
install it as a system-wide application, there is a Makefile included that also
|
||||
places some configuration files in appropriate directories such that autorandr
|
||||
is invoked automatically when a monitor is connected or removed, the system
|
||||
wakes up from suspend, or a user logs into an X11 session. Run `make install`
|
||||
as root to install it.
|
||||
|
||||
If you prefer to have a system wide install managed by your package manager,
|
||||
you can
|
||||
|
||||
* Use the [official Arch package](https://archlinux.org/packages/extra/any/autorandr/).
|
||||
* Use the [official Debian package](https://packages.debian.org/sid/x11/autorandr) on sid
|
||||
* Use the [FreeBSD Ports Collection](https://www.freshports.org/x11/autorandr/) on FreeBSD.
|
||||
* Use the [official Gentoo package](https://packages.gentoo.org/packages/x11-misc/autorandr).
|
||||
* Use the
|
||||
[nix package](https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/autorandr.nix)
|
||||
on NixOS.
|
||||
* Use the
|
||||
[guix package](https://git.savannah.gnu.org/cgit/guix.git/log/gnu/packages/xdisorg.scm?qt=grep&q=autorandr)
|
||||
on Guix.
|
||||
* Use the [SlackBuild](https://slackbuilds.org/repository/14.2/desktop/autorandr/) on Slackware.
|
||||
* Use the automated nightlies generated by the
|
||||
[openSUSE build service](https://build.opensuse.org/package/show/home:phillipberndt/autorandr)
|
||||
for various distributions (RPM and DEB based).
|
||||
* Use the [X binary package system](https://wiki.voidlinux.eu/XBPS) on Void Linux
|
||||
* Build a .deb-file from the source tree using `make deb`.
|
||||
* Build a .rpm-file from the source tree using `make rpm`.
|
||||
|
||||
We appreciate packaging scripts for other distributions, please file a pull
|
||||
request if you write one.
|
||||
|
||||
If you prefer `pip` over your package manager, you can install autorandr with:
|
||||
|
||||
sudo pip install "git+http://github.com/phillipberndt/autorandr#egg=autorandr"
|
||||
|
||||
or simply
|
||||
|
||||
sudo pip install autorandr
|
||||
|
||||
if you prefer to use a stable version.
|
||||
|
||||
## How to use
|
||||
|
||||
Save your current display configuration and setup with:
|
||||
|
||||
autorandr --save mobile
|
||||
|
||||
Connect an additional display, configure your setup and save it:
|
||||
|
||||
autorandr --save docked
|
||||
|
||||
Now autorandr can detect which hardware setup is active:
|
||||
|
||||
$ autorandr
|
||||
mobile
|
||||
docked (detected)
|
||||
|
||||
To automatically reload your setup:
|
||||
|
||||
$ autorandr --change
|
||||
|
||||
To manually load a profile:
|
||||
|
||||
$ autorandr --load <profile>
|
||||
|
||||
or simply:
|
||||
|
||||
$ autorandr <profile>
|
||||
|
||||
autorandr tries to avoid reloading an identical configuration. To force the
|
||||
(re)configuration:
|
||||
|
||||
$ autorandr --load <profile> --force
|
||||
|
||||
To prevent a profile from being loaded, place a script called _block_ in its
directory. The script is evaluated before the screen setup is inspected, and
if it returns a value of 0 the profile is skipped. This can be used
|
||||
to query the status of a docking station you are about to leave.
|
||||
|
||||
If no suitable profile can be identified, the current configuration is kept.
|
||||
To change this behaviour and switch to a fallback configuration, specify
|
||||
`--default <profile>`. The system-wide installation of autorandr by default
|
||||
calls autorandr with a parameter `--default default`. There are three special,
|
||||
virtual configurations called `horizontal`, `vertical` and `common`. They
|
||||
automatically generate a configuration that incorporates all screens
|
||||
connected to the computer. You can symlink `default` to one of these
|
||||
names in your configuration directory to have autorandr use any of them
|
||||
as the default configuration without you having to change the system-wide
|
||||
configuration.
|
||||
|
||||
You can store default values for any option in an INI-file located at
|
||||
`~/.config/autorandr/settings.ini`. In a `config` section, you may place any
|
||||
default values in the form `option-name=option-argument`.
|
||||
|
||||
A common and effective use of this is to specify default `skip-options`, for
|
||||
instance skipping the `gamma` setting if using
|
||||
[`redshift`](https://github.com/jonls/redshift) as a daemon. To implement
|
||||
the equivalent of `--skip-options gamma`, your `settings.ini` file should look
|
||||
like this:
|
||||
|
||||
```
|
||||
[config]
|
||||
skip-options=gamma
|
||||
```
|
||||
|
||||
## Advanced usage
|
||||
|
||||
### Hook scripts
|
||||
|
||||
Three more scripts can be placed in the configuration directory
|
||||
(as defined by the [XDG spec](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html),
|
||||
usually `~/.config/autorandr` or `~/.autorandr` if you have an old installation
|
||||
for user configuration and `/etc/xdg/autorandr` for system wide configuration):
|
||||
|
||||
- `postswitch` is executed *after* a mode switch has taken place. This can be
|
||||
used to notify window managers or other applications about the switch.
|
||||
- `preswitch` is executed *before* a mode switch takes place.
|
||||
- `postsave` is executed after a profile was stored or altered.
|
||||
- `predetect` is executed before autorandr attempts to run xrandr.
|
||||
|
||||
These scripts must be executable and can be placed directly in the configuration
|
||||
directory, where they will always be executed, or in the profile subdirectories,
|
||||
where they will only be executed on changes regarding that specific profile.
|
||||
|
||||
Instead of (or in addition to) these scripts, you can also place as many executable
|
||||
files as you like in subdirectories called `script_name.d` (e.g. `postswitch.d`).
|
||||
The order of execution of scripts in these directories is by file name, you can
|
||||
force a certain ordering by naming them `10-wallpaper`, `20-restart-wm`, etc.
|
||||
|
||||
If a script with the same name occurs multiple times, user configuration
|
||||
takes precedence over system configuration (as specified by the
|
||||
[XDG spec](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html))
|
||||
and profile configuration over general configuration.
|
||||
|
||||
As a concrete example, suppose you have the files
|
||||
|
||||
- `/etc/xdg/autorandr/postswitch`
|
||||
- `~/.config/autorandr/postswitch`
|
||||
- `~/.config/autorandr/postswitch.d/notify-herbstluftwm`
|
||||
- `~/.config/autorandr/docked/postswitch`
|
||||
|
||||
and switch from `mobile` to `docked`. Then
|
||||
`~/.config/autorandr/docked/postswitch` is executed, since the profile specific
|
||||
configuration takes precedence, and
|
||||
`~/.config/autorandr/postswitch.d/notify-herbstluftwm` is executed, since
|
||||
it has a unique name.
|
||||
|
||||
If you switch back from `docked` to `mobile`, `~/.config/autorandr/postswitch`
|
||||
is executed instead of the `docked` specific `postswitch`.
|
||||
|
||||
If you experience issues with xrandr being executed too early after connecting
|
||||
a new monitor, then you can use a `predetect` script to delay the execution.
|
||||
Write e.g. `sleep 1` into that file to make autorandr wait a second before
|
||||
running `xrandr`.
|
||||
|
||||
#### Variables
|
||||
|
||||
Some of autorandr's state is exposed as environment variables
|
||||
prefixed with `AUTORANDR_`, such as:
|
||||
- `AUTORANDR_CURRENT_PROFILE`
|
||||
- `AUTORANDR_CURRENT_PROFILES`
|
||||
- `AUTORANDR_PROFILE_FOLDER`
|
||||
- `AUTORANDR_MONITORS`
|
||||
|
||||
with the intention that they can be used within the hook scripts.
|
||||
|
||||
For instance, you might display which profile has just been activated by
|
||||
including the following in a `postswitch` script:
|
||||
```sh
|
||||
notify-send -i display "Display profile" "$AUTORANDR_CURRENT_PROFILE"
|
||||
```
|
||||
|
||||
The one kink is that during `preswitch`, `AUTORANDR_CURRENT_PROFILE` is
|
||||
reporting the *upcoming* profile rather than the *current* one.
|
||||
|
||||
### Wildcard EDID matching

The EDID strings in the `~/.config/autorandr/*/setup` files may contain an
asterisk to enable wildcard matching: such EDIDs are matched against connected
monitors using the usual file name globbing rules. This can be used to create
profiles matching multiple (or any) monitors.
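The matching itself is ordinary shell-style globbing; a minimal sketch of the idea (illustrative only, with made-up values, not autorandr's actual code):

```python
# Glob-match a connected monitor's EDID against a wildcard entry from a setup file.
from fnmatch import fnmatch

pattern = "00ffffffffffff004c2d*"                 # hypothetical wildcard entry from a setup file
edid = "00ffffffffffff004c2d0e0332325a43010101"   # hypothetical EDID of a connected monitor

if fnmatch(edid, pattern):
    print("profile applies to this monitor")
```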
|
||||
|
||||
### udev triggers with NVidia cards
|
||||
|
||||
In order for `udev` to detect `drm` events from the native NVidia driver, the
|
||||
kernel parameter `nvidia-drm.modeset` must be set to 1. For example, add a file
|
||||
`/etc/modprobe.d/nvidia-drm-modeset.conf`:
|
||||
|
||||
```
|
||||
options nvidia_drm modeset=1
|
||||
```
|
||||
|
||||
### Wayland
|
||||
|
||||
Before running, autorandr checks the environment for the `WAYLAND_DISPLAY`
variable to determine whether it is running in a Wayland session. This is to
avoid issues with using xrandr in Wayland environments.
|
||||
|
||||
If you need to run autorandr in a Wayland environment, one workaround is to
|
||||
unset the `WAYLAND_DISPLAY` variable before running the program, such as:
|
||||
|
||||
```
|
||||
WAYLAND_DISPLAY= autorandr
|
||||
```
|
||||
|
||||
## Changelog
|
||||
|
||||
**autorandr 1.15**
|
||||
* *2023-11-27* Several regex literal bug fixes
|
||||
* *2023-12-27* Fix #375: Listen to correct events in launcher
|
||||
* *2024-03-03* Fix #367: Skip profiles without outputs
|
||||
|
||||
|
||||
**autorandr 1.14**
|
||||
* *2023-06-22* Direct --match-edid renaming of output messages to stderr
|
||||
* *2023-06-22* Add Wayland awareness
|
||||
* *2023-06-22* Various minor auxiliary tooling bug fixes, see git-log
|
||||
|
||||
**autorandr 1.13.3**
|
||||
* *2023-01-24* Revert udev rule to rely on "change" event (see #324)
|
||||
|
||||
**autorandr 1.13.2**
|
||||
* *2023-01-23* Fix autostart in KDE (see #320)
|
||||
* *2023-01-23* Match add/remove rather than change in udev rule (see #321)
|
||||
* *2023-01-23* Fix wildcard use in EDIDs (see #322)
|
||||
* *2023-01-23* Do a final xrandr call to set the frame buffer size (see #319)
|
||||
|
||||
**autorandr 1.13.1**
|
||||
* *2023-01-16* Fix bug with Version comparison
|
||||
|
||||
**autorandr 1.13**
|
||||
* *2023-01-15* Add reversed horizontal/vertical profiles
|
||||
* *2023-01-15* Fix distutils deprecation warning
|
||||
* *2023-01-15* Print error when user script fails
|
||||
* *2022-12-01* Support `--skip-options set` to skip setting properties
|
||||
|
||||
**autorandr 1.12.1**
|
||||
* *2021-12-22* Fix `--match-edid` (see #273)
|
||||
|
||||
**autorandr 1.12**
|
||||
* *2021-12-16* Switch default interpreter to Python 3
|
||||
* *2021-12-16* Add `--list` to list all profiles
|
||||
* *2021-12-16* Add `--cycle` to cycle all detected profiles
|
||||
* *2021-12-16* Store display properties (see #204)
|
||||
|
||||
**autorandr 1.11**
|
||||
* *2020-05-23* Handle empty sys.executable
|
||||
* *2020-06-08* Fix Python 2 compatibility
|
||||
* *2020-10-06* Set group membership of users in batch mode
|
||||
|
||||
**autorandr 1.10.1**
|
||||
* *2020-05-04* Revert making the launcher the default (fixes #195)
|
||||
|
||||
**autorandr 1.10**
|
||||
* *2020-04-23* Fix hook script execution order to match description from readme
|
||||
* *2020-04-11* Handle negative gamma values (fixes #188)
|
||||
* *2020-04-11* Sort approximate matches in detected profiles by quality of match
|
||||
* *2020-01-31* Handle non-ASCII environment variables (fixes #180)
|
||||
* *2019-12-31* Fix output positioning if the top-left output is not the first
|
||||
* *2019-12-31* Accept negative gamma values (and interpret them as 0)
|
||||
* *2019-12-31* Prefer the X11 launcher over systemd/udev configuration
|
||||
|
||||
**autorandr 1.9**
|
||||
|
||||
* *2019-11-10* Count closed lids as disconnected outputs
|
||||
* *2019-10-05* Do not overwrite existing configurations without `--force`
|
||||
* *2019-08-16* Accept modes that don't match the WWWxHHH pattern
|
||||
* *2019-03-22* Improve bash autocompletion
|
||||
* *2019-03-21* Store CRTC values in configurations
|
||||
* *2019-03-24* Fix handling of recently disconnected outputs (See #128 and #143)
|
||||
|
||||
**autorandr 1.8.1**
|
||||
|
||||
* *2019-03-18* Removed mandb call from Makefile
|
||||
|
||||
**autorandr 1.8**
|
||||
|
||||
* *2019-02-17* Add an X11 daemon that runs autorandr when a display connects (by @rliou92, #127)
|
||||
* *2019-02-17* Replace width=0 check with disconnected to detect disconnected monitors (by @joseph-jones, #139)
|
||||
* *2019-02-17* Fix handling of empty padding (by @jschwab, #138)
|
||||
* *2019-02-17* Add a man page (by @somers-all-the-time, #133)
|
||||
|
||||
**autorandr 1.7**
|
||||
|
||||
* *2018-09-25* Fix FB size computation with rotated screens (by @Janno, #117)
|
||||
|
||||
**autorandr 1.6**
|
||||
|
||||
* *2018-04-19* Bugfix: Do not load default profile unless --change is set
|
||||
* *2018-04-30* Added a `AUTORANDR_MONITORS` variable to hooks (by @bricewge, #106)
|
||||
* *2018-06-29* Fix detection of current configuration if extra monitors are active
|
||||
* *2018-07-11* Bugfix in the latest change: Correctly handle "off" monitors when comparing
|
||||
* *2018-07-19* Do not kill spawned user processes from systemd unit
|
||||
* *2018-07-20* Correctly handle "off" monitors when comparing -- fixup for another bug.
|
||||
|
||||
**autorandr 1.5**
|
||||
|
||||
* *2018-01-03* Add --version
|
||||
* *2018-01-04* Fixed vertical/horizontal/clone-largest virtual profiles
|
||||
* *2018-03-07* Output all non-error messages to stdout instead of stderr
|
||||
* *2018-03-25* Add --detected and --current to filter the profile list output
|
||||
* *2018-03-25* Allow wildcard matching in EDIDs
|
||||
|
||||
**autorandr 1.4**
|
||||
|
||||
* *2017-12-22* Fixed broken virtual profile support
|
||||
* *2017-12-14* Added support for a settings file
|
||||
* *2017-12-14* Added a virtual profile `off`, which disables all screens
|
||||
|
||||
**autorandr 1.3**
|
||||
|
||||
* *2017-11-13* Add a short form for `--load`
|
||||
* *2017-11-21* Fix environment stealing in `--batch` mode (See #87)
|
||||
|
||||
**autorandr 1.2**
|
||||
|
||||
* *2017-07-16* Skip `--panning` unless it is required (See #72)
|
||||
* *2017-10-13* Add `clone-largest` virtual profile
|
||||
|
||||
**autorandr 1.1**
|
||||
|
||||
* *2017-06-07* Call systemctl with `--no-block` from udev rule (See #61)
|
||||
* *2017-01-20* New script hook, `predetect`
|
||||
* *2017-01-18* Accept comments (lines starting with `#`) in config/setup files
|
||||
|
||||
**autorandr 1.0**
|
||||
|
||||
* *2016-12-07* Tag the current code as version 1.0.0; see github issue #54
|
||||
* *2016-10-03* Install a desktop file to `/etc/xdg/autostart` by default
|
||||
|
||||
|
10
venv/lib/python3.11/site-packages/autorandr-1.15.post1.dist-info/RECORD
Executable file
@@ -0,0 +1,10 @@
../../../bin/autorandr,sha256=b7-Kfx617lTXecYwDuiiUgSizM-HhcqaIKkFNj2U7_Y,273
__pycache__/autorandr.cpython-311.pyc,,
autorandr-1.15.post1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
autorandr-1.15.post1.dist-info/METADATA,sha256=WU3tSPIh4uxAwXtLlUMhAhf-D1CkTaRHmoqWpOoIrtA,16035
autorandr-1.15.post1.dist-info/RECORD,,
autorandr-1.15.post1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
autorandr-1.15.post1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
autorandr-1.15.post1.dist-info/entry_points.txt,sha256=nIbh59SOctw_HjgX7a8iEKc-TawiF-tiwbIs7Wh9H5w,64
autorandr-1.15.post1.dist-info/top_level.txt,sha256=EZfqdN1OiuXc20FuQhKSxk4gieAvzWOXie8MgR29Aek,10
autorandr.py,sha256=rRxKO3qME5aTlZ0zdkLsL9kP-pUFV1y_CZZ6-ukoJJo,75378
5
venv/lib/python3.11/site-packages/autorandr-1.15.post1.dist-info/WHEEL
Executable file
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.1)
Root-Is-Purelib: true
Tag: py3-none-any

@@ -0,0 +1,3 @@
[console_scripts]
autorandr = autorandr:exception_handled_main

@@ -0,0 +1 @@
autorandr
1704
venv/lib/python3.11/site-packages/autorandr.py
Executable file
File diff suppressed because it is too large.
1
venv/lib/python3.11/site-packages/bcrypt-4.3.0.dist-info/INSTALLER
Executable file
@@ -0,0 +1 @@
pip
201
venv/lib/python3.11/site-packages/bcrypt-4.3.0.dist-info/LICENSE
Executable file
@@ -0,0 +1,201 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
330
venv/lib/python3.11/site-packages/bcrypt-4.3.0.dist-info/METADATA
Executable file
@@ -0,0 +1,330 @@
|
||||
Metadata-Version: 2.2
|
||||
Name: bcrypt
|
||||
Version: 4.3.0
|
||||
Summary: Modern password hashing for your software and your servers
|
||||
Author-email: The Python Cryptographic Authority developers <cryptography-dev@python.org>
|
||||
License: Apache-2.0
|
||||
Project-URL: homepage, https://github.com/pyca/bcrypt/
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: License :: OSI Approved :: Apache Software License
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Requires-Python: >=3.8
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE
|
||||
Provides-Extra: tests
|
||||
Requires-Dist: pytest!=3.3.0,>=3.2.1; extra == "tests"
|
||||
Provides-Extra: typecheck
|
||||
Requires-Dist: mypy; extra == "typecheck"
|
||||
|
||||
bcrypt
|
||||
======
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/bcrypt.svg
|
||||
:target: https://pypi.org/project/bcrypt/
|
||||
:alt: Latest Version
|
||||
|
||||
.. image:: https://github.com/pyca/bcrypt/workflows/CI/badge.svg?branch=main
|
||||
:target: https://github.com/pyca/bcrypt/actions?query=workflow%3ACI+branch%3Amain
|
||||
|
||||
Acceptable password hashing for your software and your servers (but you should
|
||||
really use argon2id or scrypt)
|
||||
|
||||
|
||||
Installation
|
||||
============
|
||||
|
||||
To install bcrypt, simply:
|
||||
|
||||
.. code:: console
|
||||
|
||||
$ pip install bcrypt
|
||||
|
||||
Note that bcrypt should build very easily on Linux provided you have a C
|
||||
compiler and a Rust compiler (the minimum supported Rust version is 1.56.0).
|
||||
|
||||
For Debian and Ubuntu, the following command will ensure that the required dependencies are installed:
|
||||
|
||||
.. code:: console
|
||||
|
||||
$ sudo apt-get install build-essential cargo
|
||||
|
||||
For Fedora and RHEL-derivatives, the following command will ensure that the required dependencies are installed:
|
||||
|
||||
.. code:: console
|
||||
|
||||
$ sudo yum install gcc cargo
|
||||
|
||||
For Alpine, the following command will ensure that the required dependencies are installed:
|
||||
|
||||
.. code:: console
|
||||
|
||||
$ apk add --update musl-dev gcc cargo
|
||||
|
||||
|
||||
Alternatives
|
||||
============
|
||||
|
||||
While bcrypt remains an acceptable choice for password storage, depending on your specific use case you may also want to consider using scrypt (either via `standard library`_ or `cryptography`_) or argon2id via `argon2_cffi`_.
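For reference, the standard-library route mentioned above is ``hashlib.scrypt``; a minimal sketch (the cost parameters are illustrative, not a tuning recommendation; requires a Python built against OpenSSL):

.. code:: pycon

    >>> # Derive a key from a password with scrypt from the standard library.
    >>> # n, r and p below are example parameters only.
    >>> import hashlib, os
    >>> salt = os.urandom(16)
    >>> key = hashlib.scrypt(b"super secret password", salt=salt, n=2**14, r=8, p=1)
    >>> len(key)
    64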
|
||||
|
||||
Changelog
|
||||
=========
|
||||
|
||||
Unreleased
|
||||
----------
|
||||
|
||||
* Dropped support for Python 3.7.
|
||||
* We now support free-threaded Python 3.13.
|
||||
* We now support PyPy 3.11.
|
||||
* We now publish wheels for free-threaded Python 3.13, for PyPy 3.11 on
|
||||
``manylinux``, and for ARMv7l on ``manylinux``.
|
||||
|
||||
4.2.1
|
||||
-----
|
||||
|
||||
* Bump Rust dependency versions - this should resolve crashes on Python 3.13
|
||||
free-threaded builds.
|
||||
* We no longer build ``manylinux`` wheels for PyPy 3.9.
|
||||
|
||||
4.2.0
|
||||
-----
|
||||
|
||||
* Bump Rust dependency versions
|
||||
* Removed the ``BCRYPT_ALLOW_RUST_163`` environment variable.
|
||||
|
||||
4.1.3
|
||||
-----
|
||||
|
||||
* Bump Rust dependency versions
|
||||
|
||||
4.1.2
|
||||
-----
|
||||
|
||||
* Publish both ``py37`` and ``py39`` wheels. This should resolve some errors
|
||||
relating to initializing a module multiple times per process.
|
||||
|
||||
4.1.1
|
||||
-----
|
||||
|
||||
* Fixed the type signature on the ``kdf`` method.
|
||||
* Fixed packaging bug on Windows.
|
||||
* Fixed incompatibility with passlib package detection assumptions.
|
||||
|
||||
4.1.0
|
||||
-----
|
||||
|
||||
* Dropped support for Python 3.6.
|
||||
* Bumped MSRV to 1.64. (Note: Rust 1.63 can be used by setting the ``BCRYPT_ALLOW_RUST_163`` environment variable)
|
||||
|
||||
4.0.1
|
||||
-----
|
||||
|
||||
* We now build PyPy ``manylinux`` wheels.
|
||||
* Fixed a bug where passing an invalid ``salt`` to ``checkpw`` could result in
|
||||
a ``pyo3_runtime.PanicException``. It now correctly raises a ``ValueError``.
|
||||
|
||||
4.0.0
|
||||
-----
|
||||
|
||||
* ``bcrypt`` is now implemented in Rust. Users building from source will need
|
||||
to have a Rust compiler available. Nothing will change for users downloading
|
||||
wheels.
|
||||
* We no longer ship ``manylinux2010`` wheels. Users should upgrade to the latest
|
||||
``pip`` to ensure this doesn’t cause issues downloading wheels on their
|
||||
platform. We now ship ``manylinux_2_28`` wheels for users on new enough platforms.
|
||||
* ``NUL`` bytes are now allowed in inputs.
|
||||
|
||||
|
||||
3.2.2
|
||||
-----
|
||||
|
||||
* Fixed packaging of ``py.typed`` files in wheels so that ``mypy`` works.
|
||||
|
||||
3.2.1
|
||||
-----
|
||||
|
||||
* Added support for compilation on z/OS
|
||||
* The next release of ``bcrypt`` will be 4.0 and it will require Rust at
|
||||
compile time, for users building from source. There will be no additional
|
||||
requirement for users who are installing from wheels. Users on most
|
||||
platforms will be able to obtain a wheel by making sure they have an up to
|
||||
date ``pip``. The minimum supported Rust version will be 1.56.0.
|
||||
* This will be the final release for which we ship ``manylinux2010`` wheels.
|
||||
Going forward the minimum supported manylinux ABI for our wheels will be
|
||||
``manylinux2014``. The vast majority of users will continue to receive
|
||||
``manylinux`` wheels provided they have an up to date ``pip``.
|
||||
|
||||
|
||||
3.2.0
|
||||
-----
|
||||
|
||||
* Added typehints for library functions.
|
||||
* Dropped support for Python versions less than 3.6 (2.7, 3.4, 3.5).
|
||||
* Shipped ``abi3`` Windows wheels (requires pip >= 20).
|
||||
|
||||
3.1.7
|
||||
-----
|
||||
|
||||
* Set a ``setuptools`` lower bound for PEP517 wheel building.
|
||||
* We no longer distribute 32-bit ``manylinux1`` wheels. Continuing to produce
|
||||
them was a maintenance burden.
|
||||
|
||||
3.1.6
|
||||
-----
|
||||
|
||||
* Added support for compilation on Haiku.
|
||||
|
||||
3.1.5
|
||||
-----
|
||||
|
||||
* Added support for compilation on AIX.
|
||||
* Dropped Python 2.6 and 3.3 support.
|
||||
* Switched to using ``abi3`` wheels for Python 3. If you are not getting a
|
||||
wheel on a compatible platform please upgrade your ``pip`` version.
|
||||
|
||||
3.1.4
|
||||
-----
|
||||
|
||||
* Fixed compilation with mingw and on illumos.
|
||||
|
||||
3.1.3
|
||||
-----
|
||||
* Fixed a compilation issue on Solaris.
|
||||
* Added a warning when using too few rounds with ``kdf``.
|
||||
|
||||
3.1.2
|
||||
-----
|
||||
* Fixed a compile issue affecting big endian platforms.
|
||||
* Fixed invalid escape sequence warnings on Python 3.6.
|
||||
* Fixed building in non-UTF8 environments on Python 2.
|
||||
|
||||
3.1.1
|
||||
-----
|
||||
* Resolved a ``UserWarning`` when used with ``cffi`` 1.8.3.
|
||||
|
||||
3.1.0
|
||||
-----
|
||||
* Added support for ``checkpw``, a convenience method for verifying a password.
|
||||
* Ensure that you get a ``$2y$`` hash when you input a ``$2y$`` salt.
|
||||
* Fixed a regression where ``$2a`` hashes were vulnerable to a wraparound bug.
|
||||
* Fixed compilation under Alpine Linux.
|
||||
|
||||
3.0.0
|
||||
-----
|
||||
* Switched the C backend to code obtained from the OpenBSD project rather than
|
||||
openwall.
|
||||
* Added support for ``bcrypt_pbkdf`` via the ``kdf`` function.
|
||||
|
||||
2.0.0
|
||||
-----
|
||||
* Added support for an adjustible prefix when calling ``gensalt``.
|
||||
* Switched to CFFI 1.0+
|
||||
|
||||

Usage
-----

Password Hashing
~~~~~~~~~~~~~~~~

Hashing and then later checking that a password matches the previously hashed
password is very simple:

.. code:: pycon

    >>> import bcrypt
    >>> password = b"super secret password"
    >>> # Hash a password for the first time, with a randomly-generated salt
    >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt())
    >>> # Check that an unhashed password matches one that has previously been
    >>> # hashed
    >>> if bcrypt.checkpw(password, hashed):
    ...     print("It Matches!")
    ... else:
    ...     print("It Does not Match :(")

KDF
~~~

As of 3.0.0 ``bcrypt`` now offers a ``kdf`` function which does ``bcrypt_pbkdf``.
This KDF is used in OpenSSH's newer encrypted private key format.

.. code:: pycon

    >>> import bcrypt
    >>> key = bcrypt.kdf(
    ...     password=b'password',
    ...     salt=b'salt',
    ...     desired_key_bytes=32,
    ...     rounds=100)
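
A note on rounds: as the 3.1.3 changelog entry above mentions, ``kdf`` warns
when it is called with very few rounds. The bundled type stub shown later in
this diff also lists an ``ignore_few_rounds`` flag; the sketch below assumes
that flag simply suppresses the warning.

.. code:: pycon

    >>> import bcrypt
    >>> # Low round counts are fast but weak; the flag below is assumed to only
    >>> # silence the warning, it does not make the derivation stronger.
    >>> key = bcrypt.kdf(
    ...     password=b'password',
    ...     salt=b'salt',
    ...     desired_key_bytes=32,
    ...     rounds=10,
    ...     ignore_few_rounds=True)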

Adjustable Work Factor
~~~~~~~~~~~~~~~~~~~~~~

One of bcrypt's features is an adjustable logarithmic work factor. To adjust
the work factor merely pass the desired number of rounds to
``bcrypt.gensalt(rounds=12)`` (which defaults to 12):

.. code:: pycon

    >>> import bcrypt
    >>> password = b"super secret password"
    >>> # Hash a password for the first time, with a certain number of rounds
    >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt(14))
    >>> # Check that an unhashed password matches one that has previously been
    >>> # hashed
    >>> if bcrypt.checkpw(password, hashed):
    ...     print("It Matches!")
    ... else:
    ...     print("It Does not Match :(")

Adjustable Prefix
~~~~~~~~~~~~~~~~~

Another one of bcrypt's features is an adjustable prefix to let you define what
libraries you'll remain compatible with. To adjust this, pass either ``2a`` or
``2b`` (the default) to ``bcrypt.gensalt(prefix=b"2b")`` as a bytes object.

As of 3.0.0 the ``$2y$`` prefix is still supported in ``hashpw`` but deprecated.
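
For example, a brief sketch of requesting the ``2a`` prefix (the generated salt
is random, so only the prefix check below is meaningful):

.. code:: pycon

    >>> import bcrypt
    >>> salt = bcrypt.gensalt(prefix=b"2a")
    >>> # The resulting salt, and any hash produced with it, carries the $2a$ prefix.
    >>> salt.startswith(b"$2a$")
    True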

Maximum Password Length
~~~~~~~~~~~~~~~~~~~~~~~

The bcrypt algorithm only handles passwords up to 72 characters; any characters
beyond that are ignored. To work around this, a common approach is to hash a
password with a cryptographic hash (such as ``sha256``) and then base64
encode it to prevent NULL byte problems before hashing the result with
``bcrypt``:

.. code:: pycon

    >>> import base64
    >>> import hashlib
    >>> import bcrypt
    >>> password = b"an incredibly long password" * 10
    >>> hashed = bcrypt.hashpw(
    ...     base64.b64encode(hashlib.sha256(password).digest()),
    ...     bcrypt.gensalt()
    ... )

Compatibility
-------------

This library should be compatible with py-bcrypt and it will run on Python
3.8+ (including free-threaded builds) and PyPy 3.

Security
--------

``bcrypt`` follows the `same security policy as cryptography`_; if you
identify a vulnerability, we ask you to contact us privately.

.. _`same security policy as cryptography`: https://cryptography.io/en/latest/security.html
.. _`standard library`: https://docs.python.org/3/library/hashlib.html#hashlib.scrypt
.. _`argon2_cffi`: https://argon2-cffi.readthedocs.io
.. _`cryptography`: https://cryptography.io/en/latest/hazmat/primitives/key-derivation-functions/#cryptography.hazmat.primitives.kdf.scrypt.Scrypt
11
venv/lib/python3.11/site-packages/bcrypt-4.3.0.dist-info/RECORD
Executable file
@ -0,0 +1,11 @@
|
||||
bcrypt-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
bcrypt-4.3.0.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
|
||||
bcrypt-4.3.0.dist-info/METADATA,sha256=95qX7ziIfmOF0kNM95YZuWhLVfFy-6EtssVvf1ZgeWg,10042
|
||||
bcrypt-4.3.0.dist-info/RECORD,,
|
||||
bcrypt-4.3.0.dist-info/WHEEL,sha256=XlovOtcAZFqrc4OSNBtc5R3yDeRHyhWP24RdDnylFpY,111
|
||||
bcrypt-4.3.0.dist-info/top_level.txt,sha256=BkR_qBzDbSuycMzHWE1vzXrfYecAzUVmQs6G2CukqNI,7
|
||||
bcrypt/__init__.py,sha256=cv-NupIX6P7o6A4PK_F0ur6IZoDr3GnvyzFO9k16wKQ,1000
|
||||
bcrypt/__init__.pyi,sha256=ITUCB9mPVU8sKUbJQMDUH5YfQXZb1O55F9qvKZR_o8I,333
|
||||
bcrypt/__pycache__/__init__.cpython-311.pyc,,
|
||||
bcrypt/_bcrypt.abi3.so,sha256=oMArVCuY_atg2H4SGNfM-zbfEgUOkd4qSiWn2nPqmXc,644928
|
||||
bcrypt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
5
venv/lib/python3.11/site-packages/bcrypt-4.3.0.dist-info/WHEEL
Executable file
@ -0,0 +1,5 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: setuptools (75.8.2)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp39-abi3-manylinux_2_34_x86_64
|
||||
|
1
venv/lib/python3.11/site-packages/bcrypt-4.3.0.dist-info/top_level.txt
Executable file
@ -0,0 +1 @@
|
||||
bcrypt
|
43
venv/lib/python3.11/site-packages/bcrypt/__init__.py
Executable file
@ -0,0 +1,43 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from ._bcrypt import (
|
||||
__author__,
|
||||
__copyright__,
|
||||
__email__,
|
||||
__license__,
|
||||
__summary__,
|
||||
__title__,
|
||||
__uri__,
|
||||
checkpw,
|
||||
gensalt,
|
||||
hashpw,
|
||||
kdf,
|
||||
)
|
||||
from ._bcrypt import (
|
||||
__version_ex__ as __version__,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"__author__",
|
||||
"__copyright__",
|
||||
"__email__",
|
||||
"__license__",
|
||||
"__summary__",
|
||||
"__title__",
|
||||
"__uri__",
|
||||
"__version__",
|
||||
"checkpw",
|
||||
"gensalt",
|
||||
"hashpw",
|
||||
"kdf",
|
||||
]
|
10
venv/lib/python3.11/site-packages/bcrypt/__init__.pyi
Executable file
@ -0,0 +1,10 @@
|
||||
def gensalt(rounds: int = 12, prefix: bytes = b"2b") -> bytes: ...
|
||||
def hashpw(password: bytes, salt: bytes) -> bytes: ...
|
||||
def checkpw(password: bytes, hashed_password: bytes) -> bool: ...
|
||||
def kdf(
|
||||
password: bytes,
|
||||
salt: bytes,
|
||||
desired_key_bytes: int,
|
||||
rounds: int,
|
||||
ignore_few_rounds: bool = False,
|
||||
) -> bytes: ...
|
BIN
venv/lib/python3.11/site-packages/bcrypt/__pycache__/__init__.cpython-311.pyc
Executable file
BIN
venv/lib/python3.11/site-packages/bcrypt/__pycache__/__init__.cpython-311.pyc
Executable file
Binary file not shown.
BIN
venv/lib/python3.11/site-packages/bcrypt/_bcrypt.abi3.so
Executable file
BIN
venv/lib/python3.11/site-packages/bcrypt/_bcrypt.abi3.so
Executable file
Binary file not shown.
0
venv/lib/python3.11/site-packages/bcrypt/py.typed
Executable file
1
venv/lib/python3.11/site-packages/certifi-2025.1.31.dist-info/INSTALLER
Executable file
@ -0,0 +1 @@
|
||||
pip
|
20
venv/lib/python3.11/site-packages/certifi-2025.1.31.dist-info/LICENSE
Executable file
@ -0,0 +1,20 @@
|
||||
This package contains a modified version of ca-bundle.crt:
|
||||
|
||||
ca-bundle.crt -- Bundle of CA Root Certificates
|
||||
|
||||
This is a bundle of X.509 certificates of public Certificate Authorities
|
||||
(CA). These were automatically extracted from Mozilla's root certificates
|
||||
file (certdata.txt). This file can be found in the mozilla source tree:
|
||||
https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt
|
||||
It contains the certificates in PEM format and therefore
|
||||
can be directly used with curl / libcurl / php_curl, or with
|
||||
an Apache+mod_ssl webserver for SSL client authentication.
|
||||
Just configure this file as the SSLCACertificateFile.#
|
||||
|
||||
***** BEGIN LICENSE BLOCK *****
|
||||
This Source Code Form is subject to the terms of the Mozilla Public License,
|
||||
v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
|
||||
one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
***** END LICENSE BLOCK *****
|
||||
@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
|
77
venv/lib/python3.11/site-packages/certifi-2025.1.31.dist-info/METADATA
Executable file
@ -0,0 +1,77 @@
|
||||
Metadata-Version: 2.2
|
||||
Name: certifi
|
||||
Version: 2025.1.31
|
||||
Summary: Python package for providing Mozilla's CA Bundle.
|
||||
Home-page: https://github.com/certifi/python-certifi
|
||||
Author: Kenneth Reitz
|
||||
Author-email: me@kennethreitz.com
|
||||
License: MPL-2.0
|
||||
Project-URL: Source, https://github.com/certifi/python-certifi
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
|
||||
Classifier: Natural Language :: English
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Requires-Python: >=3.6
|
||||
License-File: LICENSE
|
||||
Dynamic: author
|
||||
Dynamic: author-email
|
||||
Dynamic: classifier
|
||||
Dynamic: description
|
||||
Dynamic: home-page
|
||||
Dynamic: license
|
||||
Dynamic: project-url
|
||||
Dynamic: requires-python
|
||||
Dynamic: summary
|
||||
|
||||
Certifi: Python SSL Certificates
|
||||
================================
|
||||
|
||||
Certifi provides Mozilla's carefully curated collection of Root Certificates for
|
||||
validating the trustworthiness of SSL certificates while verifying the identity
|
||||
of TLS hosts. It has been extracted from the `Requests`_ project.
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
``certifi`` is available on PyPI. Simply install it with ``pip``::
|
||||
|
||||
$ pip install certifi
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
To reference the installed certificate authority (CA) bundle, you can use the
|
||||
built-in function::
|
||||
|
||||
>>> import certifi
|
||||
|
||||
>>> certifi.where()
|
||||
'/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
|
||||
|
||||
Or from the command line::
|
||||
|
||||
$ python -m certifi
|
||||
/usr/local/lib/python3.7/site-packages/certifi/cacert.pem
|
||||
|
||||
Enjoy!
|
||||
|
||||
.. _`Requests`: https://requests.readthedocs.io/en/master/
|
||||
|
||||
Addition/Removal of Certificates
|
||||
--------------------------------
|
||||
|
||||
Certifi does not support any addition/removal or other modification of the
|
||||
CA trust store content. This project is intended to provide a reliable and
|
||||
highly portable root of trust to python deployments. Look to upstream projects
|
||||
for methods to use alternate trust.
|
14
venv/lib/python3.11/site-packages/certifi-2025.1.31.dist-info/RECORD
Executable file
@ -0,0 +1,14 @@
|
||||
certifi-2025.1.31.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
certifi-2025.1.31.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989
|
||||
certifi-2025.1.31.dist-info/METADATA,sha256=t5kcT5aGu0dQ6_psUNZYTqnC0uCRnponewm3uYjeHbg,2451
|
||||
certifi-2025.1.31.dist-info/RECORD,,
|
||||
certifi-2025.1.31.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
|
||||
certifi-2025.1.31.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
|
||||
certifi/__init__.py,sha256=neIaAf7BM36ygmQCmy-ZsSyjnvjWghFeu13wwEAnjj0,94
|
||||
certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
|
||||
certifi/__pycache__/__init__.cpython-311.pyc,,
|
||||
certifi/__pycache__/__main__.cpython-311.pyc,,
|
||||
certifi/__pycache__/core.cpython-311.pyc,,
|
||||
certifi/cacert.pem,sha256=xVsh-Qf3-G1IrdCTVS-1ZRdJ_1-GBQjMu0I9bB-9gMc,297255
|
||||
certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426
|
||||
certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
5
venv/lib/python3.11/site-packages/certifi-2025.1.31.dist-info/WHEEL
Executable file
@ -0,0 +1,5 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: setuptools (75.8.0)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
@ -0,0 +1 @@
|
||||
certifi
|
4
venv/lib/python3.11/site-packages/certifi/__init__.py
Executable file
@ -0,0 +1,4 @@
|
||||
from .core import contents, where
|
||||
|
||||
__all__ = ["contents", "where"]
|
||||
__version__ = "2025.01.31"
|
12
venv/lib/python3.11/site-packages/certifi/__main__.py
Executable file
@ -0,0 +1,12 @@
|
||||
import argparse
|
||||
|
||||
from certifi import contents, where
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("-c", "--contents", action="store_true")
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.contents:
|
||||
print(contents())
|
||||
else:
|
||||
print(where())
|
BIN
venv/lib/python3.11/site-packages/certifi/__pycache__/__init__.cpython-311.pyc
Executable file
BIN
venv/lib/python3.11/site-packages/certifi/__pycache__/__init__.cpython-311.pyc
Executable file
Binary file not shown.
BIN
venv/lib/python3.11/site-packages/certifi/__pycache__/__main__.cpython-311.pyc
Executable file
BIN
venv/lib/python3.11/site-packages/certifi/__pycache__/__main__.cpython-311.pyc
Executable file
Binary file not shown.
BIN
venv/lib/python3.11/site-packages/certifi/__pycache__/core.cpython-311.pyc
Executable file
BIN
venv/lib/python3.11/site-packages/certifi/__pycache__/core.cpython-311.pyc
Executable file
Binary file not shown.
4897
venv/lib/python3.11/site-packages/certifi/cacert.pem
Executable file
File diff suppressed because it is too large
Load Diff
114
venv/lib/python3.11/site-packages/certifi/core.py
Executable file
@ -0,0 +1,114 @@
|
||||
"""
|
||||
certifi.py
|
||||
~~~~~~~~~~
|
||||
|
||||
This module returns the installation location of cacert.pem or its contents.
|
||||
"""
|
||||
import sys
|
||||
import atexit
|
||||
|
||||
def exit_cacert_ctx() -> None:
|
||||
_CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
|
||||
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
|
||||
from importlib.resources import as_file, files
|
||||
|
||||
_CACERT_CTX = None
|
||||
_CACERT_PATH = None
|
||||
|
||||
def where() -> str:
|
||||
# This is slightly terrible, but we want to delay extracting the file
|
||||
# in cases where we're inside of a zipimport situation until someone
|
||||
# actually calls where(), but we don't want to re-extract the file
|
||||
# on every call of where(), so we'll do it once then store it in a
|
||||
# global variable.
|
||||
global _CACERT_CTX
|
||||
global _CACERT_PATH
|
||||
if _CACERT_PATH is None:
|
||||
# This is slightly janky, the importlib.resources API wants you to
|
||||
# manage the cleanup of this file, so it doesn't actually return a
|
||||
# path, it returns a context manager that will give you the path
|
||||
# when you enter it and will do any cleanup when you leave it. In
|
||||
# the common case of not needing a temporary file, it will just
|
||||
# return the file system location and the __exit__() is a no-op.
|
||||
#
|
||||
# We also have to hold onto the actual context manager, because
|
||||
# it will do the cleanup whenever it gets garbage collected, so
|
||||
# we will also store that at the global level as well.
|
||||
_CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
|
||||
_CACERT_PATH = str(_CACERT_CTX.__enter__())
|
||||
atexit.register(exit_cacert_ctx)
|
||||
|
||||
return _CACERT_PATH
|
||||
|
||||
def contents() -> str:
|
||||
return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")
|
||||
|
||||
elif sys.version_info >= (3, 7):
|
||||
|
||||
from importlib.resources import path as get_path, read_text
|
||||
|
||||
_CACERT_CTX = None
|
||||
_CACERT_PATH = None
|
||||
|
||||
def where() -> str:
|
||||
# This is slightly terrible, but we want to delay extracting the
|
||||
# file in cases where we're inside of a zipimport situation until
|
||||
# someone actually calls where(), but we don't want to re-extract
|
||||
# the file on every call of where(), so we'll do it once then store
|
||||
# it in a global variable.
|
||||
global _CACERT_CTX
|
||||
global _CACERT_PATH
|
||||
if _CACERT_PATH is None:
|
||||
# This is slightly janky, the importlib.resources API wants you
|
||||
# to manage the cleanup of this file, so it doesn't actually
|
||||
# return a path, it returns a context manager that will give
|
||||
# you the path when you enter it and will do any cleanup when
|
||||
# you leave it. In the common case of not needing a temporary
|
||||
# file, it will just return the file system location and the
|
||||
# __exit__() is a no-op.
|
||||
#
|
||||
# We also have to hold onto the actual context manager, because
|
||||
# it will do the cleanup whenever it gets garbage collected, so
|
||||
# we will also store that at the global level as well.
|
||||
_CACERT_CTX = get_path("certifi", "cacert.pem")
|
||||
_CACERT_PATH = str(_CACERT_CTX.__enter__())
|
||||
atexit.register(exit_cacert_ctx)
|
||||
|
||||
return _CACERT_PATH
|
||||
|
||||
def contents() -> str:
|
||||
return read_text("certifi", "cacert.pem", encoding="ascii")
|
||||
|
||||
else:
|
||||
import os
|
||||
import types
|
||||
from typing import Union
|
||||
|
||||
Package = Union[types.ModuleType, str]
|
||||
Resource = Union[str, "os.PathLike"]
|
||||
|
||||
# This fallback will work for Python versions prior to 3.7 that lack the
|
||||
# importlib.resources module but relies on the existing `where` function
|
||||
# so won't address issues with environments like PyOxidizer that don't set
|
||||
# __file__ on modules.
|
||||
def read_text(
|
||||
package: Package,
|
||||
resource: Resource,
|
||||
encoding: str = 'utf-8',
|
||||
errors: str = 'strict'
|
||||
) -> str:
|
||||
with open(where(), encoding=encoding) as data:
|
||||
return data.read()
|
||||
|
||||
# If we don't have importlib.resources, then we will just do the old logic
|
||||
# of assuming we're on the filesystem and munge the path directly.
|
||||
def where() -> str:
|
||||
f = os.path.dirname(__file__)
|
||||
|
||||
return os.path.join(f, "cacert.pem")
|
||||
|
||||
def contents() -> str:
|
||||
return read_text("certifi", "cacert.pem", encoding="ascii")
|
0
venv/lib/python3.11/site-packages/certifi/py.typed
Executable file
1
venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/INSTALLER
Executable file
@ -0,0 +1 @@
|
||||
pip
|
26
venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/LICENSE
Executable file
@ -0,0 +1,26 @@
|
||||
|
||||
Except when otherwise stated (look for LICENSE files in directories or
|
||||
information at the beginning of each file) all software and
|
||||
documentation is licensed as follows:
|
||||
|
||||
The MIT License
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included
|
||||
in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
|
40
venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/METADATA
Executable file
@ -0,0 +1,40 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: cffi
|
||||
Version: 1.17.1
|
||||
Summary: Foreign Function Interface for Python calling C code.
|
||||
Home-page: http://cffi.readthedocs.org
|
||||
Author: Armin Rigo, Maciej Fijalkowski
|
||||
Author-email: python-cffi@googlegroups.com
|
||||
License: MIT
|
||||
Project-URL: Documentation, http://cffi.readthedocs.org/
|
||||
Project-URL: Source Code, https://github.com/python-cffi/cffi
|
||||
Project-URL: Issue Tracker, https://github.com/python-cffi/cffi/issues
|
||||
Project-URL: Changelog, https://cffi.readthedocs.io/en/latest/whatsnew.html
|
||||
Project-URL: Downloads, https://github.com/python-cffi/cffi/releases
|
||||
Project-URL: Contact, https://groups.google.com/forum/#!forum/python-cffi
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Requires-Python: >=3.8
|
||||
License-File: LICENSE
|
||||
Requires-Dist: pycparser
|
||||
|
||||
|
||||
CFFI
|
||||
====
|
||||
|
||||
Foreign Function Interface for Python calling C code.
|
||||
Please see the `Documentation <http://cffi.readthedocs.org/>`_.
|
||||
|
||||
Contact
|
||||
-------
|
||||
|
||||
`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_
|
48
venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/RECORD
Executable file
@ -0,0 +1,48 @@
|
||||
_cffi_backend.cpython-311-x86_64-linux-gnu.so,sha256=K3Ig76G2fNGS7ef9yadiP-gNjpCHXd-J1ZNzvv6jfQs,1068624
|
||||
cffi-1.17.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
cffi-1.17.1.dist-info/LICENSE,sha256=BLgPWwd7vtaICM_rreteNSPyqMmpZJXFh72W3x6sKjM,1294
|
||||
cffi-1.17.1.dist-info/METADATA,sha256=u6nuvP_qPJKu2zvIbi2zkGzVu7KjnnRIYUFyIrOY3j4,1531
|
||||
cffi-1.17.1.dist-info/RECORD,,
|
||||
cffi-1.17.1.dist-info/WHEEL,sha256=JyEZ6Cxo51rQOYRMkM7cW9w1CnY9FuHxSeIJCGoxjS4,151
|
||||
cffi-1.17.1.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75
|
||||
cffi-1.17.1.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19
|
||||
cffi/__init__.py,sha256=H6t_ebva6EeHpUuItFLW1gbRp94eZRNJODLaWKdbx1I,513
|
||||
cffi/__pycache__/__init__.cpython-311.pyc,,
|
||||
cffi/__pycache__/_imp_emulation.cpython-311.pyc,,
|
||||
cffi/__pycache__/_shimmed_dist_utils.cpython-311.pyc,,
|
||||
cffi/__pycache__/api.cpython-311.pyc,,
|
||||
cffi/__pycache__/backend_ctypes.cpython-311.pyc,,
|
||||
cffi/__pycache__/cffi_opcode.cpython-311.pyc,,
|
||||
cffi/__pycache__/commontypes.cpython-311.pyc,,
|
||||
cffi/__pycache__/cparser.cpython-311.pyc,,
|
||||
cffi/__pycache__/error.cpython-311.pyc,,
|
||||
cffi/__pycache__/ffiplatform.cpython-311.pyc,,
|
||||
cffi/__pycache__/lock.cpython-311.pyc,,
|
||||
cffi/__pycache__/model.cpython-311.pyc,,
|
||||
cffi/__pycache__/pkgconfig.cpython-311.pyc,,
|
||||
cffi/__pycache__/recompiler.cpython-311.pyc,,
|
||||
cffi/__pycache__/setuptools_ext.cpython-311.pyc,,
|
||||
cffi/__pycache__/vengine_cpy.cpython-311.pyc,,
|
||||
cffi/__pycache__/vengine_gen.cpython-311.pyc,,
|
||||
cffi/__pycache__/verifier.cpython-311.pyc,,
|
||||
cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908
|
||||
cffi/_cffi_include.h,sha256=Exhmgm9qzHWzWivjfTe0D7Xp4rPUkVxdNuwGhMTMzbw,15055
|
||||
cffi/_embedding.h,sha256=EDKw5QrLvQoe3uosXB3H1xPVTYxsn33eV3A43zsA_Fw,18787
|
||||
cffi/_imp_emulation.py,sha256=RxREG8zAbI2RPGBww90u_5fi8sWdahpdipOoPzkp7C0,2960
|
||||
cffi/_shimmed_dist_utils.py,sha256=Bjj2wm8yZbvFvWEx5AEfmqaqZyZFhYfoyLLQHkXZuao,2230
|
||||
cffi/api.py,sha256=alBv6hZQkjpmZplBphdaRn2lPO9-CORs_M7ixabvZWI,42169
|
||||
cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454
|
||||
cffi/cffi_opcode.py,sha256=JDV5l0R0_OadBX_uE7xPPTYtMdmpp8I9UYd6av7aiDU,5731
|
||||
cffi/commontypes.py,sha256=7N6zPtCFlvxXMWhHV08psUjdYIK2XgsN3yo5dgua_v4,2805
|
||||
cffi/cparser.py,sha256=0qI3mEzZSNVcCangoyXOoAcL-RhpQL08eG8798T024s,44789
|
||||
cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877
|
||||
cffi/ffiplatform.py,sha256=avxFjdikYGJoEtmJO7ewVmwG_VEVl6EZ_WaNhZYCqv4,3584
|
||||
cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747
|
||||
cffi/model.py,sha256=W30UFQZE73jL5Mx5N81YT77us2W2iJjTm0XYfnwz1cg,21797
|
||||
cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976
|
||||
cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374
|
||||
cffi/recompiler.py,sha256=sim4Tm7lamt2Jn8uzKN0wMYp6ODByk3g7of47-h9LD4,65367
|
||||
cffi/setuptools_ext.py,sha256=-ebj79lO2_AUH-kRcaja2pKY1Z_5tloGwsJgzK8P3Cc,8871
|
||||
cffi/vengine_cpy.py,sha256=8UagT6ZEOZf6Dju7_CfNulue8CnsHLEzJYhnqUhoF04,43752
|
||||
cffi/vengine_gen.py,sha256=DUlEIrDiVin1Pnhn1sfoamnS5NLqfJcOdhRoeSNeJRg,26939
|
||||
cffi/verifier.py,sha256=oX8jpaohg2Qm3aHcznidAdvrVm5N4sQYG0a3Eo5mIl4,11182
|
6
venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/WHEEL
Executable file
@ -0,0 +1,6 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: setuptools (74.1.1)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp311-cp311-manylinux_2_17_x86_64
|
||||
Tag: cp311-cp311-manylinux2014_x86_64
|
||||
|
2
venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/entry_points.txt
Executable file
@ -0,0 +1,2 @@
|
||||
[distutils.setup_keywords]
|
||||
cffi_modules = cffi.setuptools_ext:cffi_modules
|
2
venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/top_level.txt
Executable file
@ -0,0 +1,2 @@
|
||||
_cffi_backend
|
||||
cffi
|
14
venv/lib/python3.11/site-packages/cffi/__init__.py
Executable file
@ -0,0 +1,14 @@
|
||||
__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
|
||||
'FFIError']
|
||||
|
||||
from .api import FFI
|
||||
from .error import CDefError, FFIError, VerificationError, VerificationMissing
|
||||
from .error import PkgConfigError
|
||||
|
||||
__version__ = "1.17.1"
|
||||
__version_info__ = (1, 17, 1)
|
||||
|
||||
# The verifier module file names are based on the CRC32 of a string that
|
||||
# contains the following version number. It may be older than __version__
|
||||
# if nothing is clearly incompatible.
|
||||
__version_verifier_modules__ = "0.8.6"
|
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/__init__.cpython-311.pyc
Executable file
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/__init__.cpython-311.pyc
Executable file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/api.cpython-311.pyc
Executable file
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/api.cpython-311.pyc
Executable file
Binary file not shown.
Binary file not shown.
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/cffi_opcode.cpython-311.pyc
Executable file
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/cffi_opcode.cpython-311.pyc
Executable file
Binary file not shown.
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/commontypes.cpython-311.pyc
Executable file
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/commontypes.cpython-311.pyc
Executable file
Binary file not shown.
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/cparser.cpython-311.pyc
Executable file
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/cparser.cpython-311.pyc
Executable file
Binary file not shown.
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/error.cpython-311.pyc
Executable file
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/error.cpython-311.pyc
Executable file
Binary file not shown.
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/ffiplatform.cpython-311.pyc
Executable file
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/ffiplatform.cpython-311.pyc
Executable file
Binary file not shown.
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/lock.cpython-311.pyc
Executable file
BIN
venv/lib/python3.11/site-packages/cffi/__pycache__/lock.cpython-311.pyc
Executable file
Binary file not shown.
Some files were not shown because too many files have changed in this diff