This commit is contained in:
2025-11-19 01:05:44 +01:00
parent 43637f5b28
commit 23b91f95b1

View File

@@ -9,7 +9,7 @@ import time
from tabulate import tabulate
from git import Repo # pip install gitpython
from port import Portainer
import logging
# CLI definition. Credentials may come from flags or the PORTAINER_* env vars.
parser = argparse.ArgumentParser(description="Portainer helper - use env vars or pass credentials.")
parser.add_argument("--base", "-b", default=os.getenv("PORTAINER_URL", "https://portainer.example.com"),
                    help="Base URL for Portainer (ENV: PORTAINER_URL)")
parser.add_argument("--user", "-u", default=os.getenv("PORTAINER_USER"),
                    help="Portainer username (ENV: PORTAINER_USER)")
parser.add_argument("--password", "-p", default=os.getenv("PORTAINER_PASS"),
                    help="Portainer password (ENV: PORTAINER_PASS)")
# --endpoint-id and --list-endpoints were each registered twice (argparse
# raises on duplicate option strings); only the newer definitions are kept.
parser.add_argument("--endpoint-id", "-e", type=str, default="all", help="Endpoint ID to limit stack operations")
parser.add_argument("--refresh-environment", "-R", action="store_true", help="Refresh environment data")
parser.add_argument("--list-endpoints", "-E", action="store_true", help="List endpoints")
parser.add_argument("--list-stacks", "-l", action="store_true", help="List stacks")
parser.add_argument("--print-all-data", "-A", action="store_true", help="Print all collected data")
parser.add_argument("--list-containers", "-c", action="store_true", help="List containers")
parser.add_argument("--update-stack", "-U", action="store_true", help="Update stacks")
parser.add_argument("--stop-containers", "-O", action="store_true", help="Stop containers")
parser.add_argument("--start-containers", "-X", action="store_true", help="Start containers")
parser.add_argument("--delete-stack", "-d", action="store_true", help="Delete stack")
parser.add_argument("--update-status", "-S", action="store_true", help="Update status")
parser.add_argument("--get-stack", metavar="NAME_OR_ID", help="Get stack by name or numeric id")
parser.add_argument("--autostart", "-a", action="store_true", help="Auto-start created stacks")
parser.add_argument("--start-stack", "-x", action='store_true')
parser.add_argument("--stop-stack", "-o", action='store_true')
parser.add_argument("--debug", "-D", action='store_true')
# "-c" clashed with --list-containers; --create-stack1 keeps only its long form.
parser.add_argument("--create-stack1", action='store_true')
parser.add_argument("--create-stack", "-n", action='store_true')
parser.add_argument("--create-stack_new2", "-N", action='store_true')
parser.add_argument("--gpu", "-g", action='store_true')
parser.add_argument("--token-only", action="store_true", help="Print auth token")
parser.add_argument("--timeout", type=int, default=10, help="Request timeout seconds")
parser.add_argument("--deploy-mode", "-m", type=str, default="git", help="Deploy mode")
args = parser.parse_args()
_LOG_LEVEL = "INFO"
LOG_FILE = "/tmp/portainer.log"
if _LOG_LEVEL == "DEBUG":
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.debug('using debug loging')
elif _LOG_LEVEL == "ERROR":
logging.basicConfig(filename=LOG_FILE, level=logging.ERROR, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.info('using error loging')
elif _LOG_LEVEL == "SCAN":
logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.info('using error loging')
else:
logging.basicConfig(filename=LOG_FILE, level=logging.INFO, format='%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
logging.info("script started")
logger = logging.getLogger(__name__)
# API key used as the Portainer auth token (see X-API-Key headers below).
# The first of two consecutive assignments was dead code and is removed.
# SECURITY(review): a live-looking API key is hard-coded in the source —
# move it to an environment variable (e.g. PORTAINER_API_KEY) and rotate
# the exposed keys.
portainer_api_key = "ptr_GCNUoFcTOaXm7k8ZxPdQGmrFIamxZPTydbserYofMHc="
def wl(msg):
    """Write *msg* to stdout only when --debug was given (debug logger)."""
    if not args.debug:
        return
    print(msg)
@@ -72,264 +92,7 @@ def get_portainer_token(base_url, username=None, password=None, timeout=10):
if not token:
raise ValueError(f"No token found in response: {data}")
return token
def api_get(base_url, path, token, timeout=10):
    """Authenticated GET request to the Portainer API; returns parsed JSON.

    Auth uses the X-API-Key header. (The original also built a Bearer
    header dict that was immediately overwritten — dead store removed.)
    Raises requests.HTTPError on non-2xx responses.
    """
    url = f"{base_url.rstrip('/')}{path}"
    headers = {"X-API-Key": f"{token}"}
    resp = requests.get(url, headers=headers, timeout=timeout)
    resp.raise_for_status()
    return resp.json()
def api_post(base_url, path, token, data, timeout=240):
    """Authenticated POST request to the Portainer API; returns parsed JSON.

    *data* is sent as the JSON body. Auth uses the X-API-Key header (the
    unused Bearer header assignment was dead code and has been removed).
    Raises requests.HTTPError on non-2xx responses.
    """
    url = f"{base_url.rstrip('/')}{path}"
    headers = {"X-API-Key": f"{token}"}
    resp = requests.post(url, headers=headers, json=data, timeout=timeout)
    resp.raise_for_status()
    return resp.json()
def api_post2(base_url, path, token, endpoint_id, name, envs, file, timeout=240):
    """Authenticated multipart POST used for file-based stack creation.

    Sends the compose file via the requests `files=` mapping in *file*
    together with form fields EndpointId / Name / Env (Env JSON-encoded).
    Returns parsed JSON; raises requests.HTTPError on non-2xx responses.
    (Docstring corrected — this is a POST, not a GET; the overwritten
    Bearer header dict was dead code and has been removed.)
    """
    url = f"{base_url.rstrip('/')}{path}"
    headers = {"X-API-Key": f"{token}"}
    data = {
        "EndpointId": endpoint_id,
        "Name": name,
        "Env": json.dumps(envs)
    }
    print(data)  # debug leftover: shows the form payload on stdout
    resp = requests.post(url, headers=headers, files=file, data=data, timeout=timeout)
    resp.raise_for_status()
    return resp.json()
def api_post_no_body(base_url, path, token, timeout=120):
    """Authenticated POST with no request body (e.g. stack start/stop).

    Returns parsed JSON; raises requests.HTTPError on non-2xx responses.
    (Docstring corrected — this is a POST; the overwritten Bearer header
    assignment was dead code and has been removed.)
    """
    url = f"{base_url.rstrip('/')}{path}"
    headers = {"X-API-Key": f"{token}"}
    resp = requests.post(url, headers=headers, timeout=timeout)
    resp.raise_for_status()
    return resp.json()
def api_delete(base_url, path, token, timeout=20):
    """Authenticated DELETE request to the Portainer API.

    Returns the HTTP status code on success; raises requests.HTTPError on
    non-2xx responses. (The overwritten Bearer header assignment was dead
    code and has been removed.)
    """
    wl("Deleting stack via API...")
    url = f"{base_url.rstrip('/')}{path}"
    headers = {"X-API-Key": f"{token}"}
    resp = requests.delete(url, headers=headers, timeout=timeout)
    wl(resp)
    resp.raise_for_status()
    wl(resp.status_code)
    return resp.status_code
def refresh_status(base_url, token, stack, timeout=20):
    """Force-refresh the image status of *stack* and return the API payload.

    Fixes a NameError in the original, which returned the undefined name
    `stcks` (copy-pasted from get_stacks) instead of `stacks`.
    Returns [] when the API yields no payload.
    """
    path = f"/api/stacks/{stack}/images_status?refresh=true"
    wl(path)
    stacks = api_get(base_url, path, token, timeout=timeout)
    wl(json.dumps(stacks, indent=2))
    if stacks is None:
        return []
    return stacks
def get_stacks(base_url, token, endpoint_id=None, timeout=10):
    """Return the list of stacks, optionally filtered to one endpoint.

    With endpoint_id=None all stacks are returned. (The original compared
    every stack's EndpointId against None and therefore always returned an
    empty list when no endpoint was given; the docstring also wrongly
    claimed the id was sent as a query parameter — it is a client-side
    filter.) Returns [] when the API yields no payload.
    """
    stacks = api_get(base_url, "/api/stacks", token, timeout=timeout)
    wl(json.dumps(stacks, indent=2))
    if stacks is None:
        return []
    if endpoint_id is None:
        return stacks
    return [s for s in stacks if s['EndpointId'] == endpoint_id]
def get_stack(base_url, identifier, token, endpoint_id=None, timeout=10):
    """Retrieve a single stack by numeric Id or by Name.

    *identifier* may be an int (Id) or a string (Name). When endpoint_id is
    given, the stack must also belong to that endpoint; with None any
    endpoint matches. (The original required `endpoint_id == EndpointId`
    even when endpoint_id was None, so endpoint-less lookups could never
    succeed.) Raises ValueError when no stack matches.
    """
    wl("get_stack")
    stacks = get_stacks(base_url, token, endpoint_id=endpoint_id, timeout=timeout)
    # Normalize identifier: try to treat it as a numeric Id first.
    ident_id = None
    try:
        ident_id = int(identifier)
    except (TypeError, ValueError):
        pass
    for s in stacks:
        # Portainer responses use the 'Id' / 'Name' / 'EndpointId' keys.
        if endpoint_id is not None and endpoint_id != s.get("EndpointId"):
            continue
        if ident_id is not None and s.get("Id") == ident_id:
            return s
        if str(s.get("Name")) == str(identifier):
            return s
    raise ValueError(f"Stack not found: {identifier}")
def create_stack(base_url, token, endpoint_id=None, data=None, timeout=300):
    """Create a standalone stack from a git repository definition.

    Parameters:
        base_url: Portainer base URL.
        token: API key passed through to api_post.
        endpoint_id: optional endpoint id, appended as a query parameter.
        data: request payload dict (defaults to an empty payload).
        timeout: request timeout in seconds.

    Returns the API response, or [] on error / empty response.
    """
    if data is None:  # mutable default argument fixed (was data={})
        data = {}
    path = "/api/stacks/create/standalone/repository"
    if endpoint_id is not None:
        path += f"?endpointId={endpoint_id}"
    wl(path)
    try:
        stacks = api_post(base_url, path, token, data, timeout=timeout)
    except Exception as e:
        if "Conflict for url" in str(e):
            print("Stack with this name may already exist.")
        else:
            print(f"Error creating stack: {e}")
            wl(json.dumps(data, indent=2))
        return []
    if stacks is None:
        return []
    return stacks
def create_stack2(base_url, token, endpoint_id=None, name="", file="", envs=None, timeout=300):
    """Create a standalone stack by uploading a compose file.

    Parameters:
        base_url: Portainer base URL.
        token: API key passed through to api_post2.
        endpoint_id: optional endpoint id, appended as a query parameter
            together with *name*.
        name: stack name.
        file: requests `files=` mapping holding the compose file.
        envs: list of {"name": ..., "value": ...} env entries.
        timeout: request timeout in seconds.

    Returns the API response, or [] on error / empty response.
    """
    if envs is None:  # mutable default argument fixed (was envs=[])
        envs = []
    path = "/api/stacks/create/standalone/file"
    if endpoint_id is not None:
        path += f"?endpointId={endpoint_id}&name={name}"
    wl(path)
    try:
        stacks = api_post2(base_url, path, token, endpoint_id, name=name, envs=envs, file=file, timeout=timeout)
    except Exception as e:
        print(f"Error creating stack: {e}")
        if "Conflict for url" in str(e):
            print("Stack with this name may already exist.")
        else:
            print(json.dumps(envs, indent=2))
        return []
    if stacks is None:
        return []
    return stacks
def stop_stack(base_url, token, endpoint_id=None, stack=None, timeout=120):
    """Stop the stack with id *stack*; return True on success, [] on error."""
    query = f"?endpointId={endpoint_id}" if endpoint_id is not None else ""
    path = f"/api/stacks/{stack}/stop{query}"
    try:
        api_post_no_body(base_url, path, token, timeout=timeout)
    except Exception as e:
        print(f"Error stoping stack: {e}")
        return []
    return True
def start_stack(base_url, token, endpoint_id=None, stack=None, timeout=120):
    """Start the stack with id *stack*; return the API payload, or [] on
    error / empty response."""
    query = f"?endpointId={endpoint_id}" if endpoint_id is not None else ""
    path = f"/api/stacks/{stack}/start{query}"
    try:
        result = api_post_no_body(base_url, path, token, timeout=timeout)
    except Exception as e:
        print(f"Error starting stack: {e}")
        return []
    return [] if result is None else result
def delete_stack(base_url, token, endpoint_id=None, stack=None, timeout=120):
    """Delete one stack by id, or every stack on an endpoint.

    When *stack* == "all", all stacks whose EndpointId matches *endpoint_id*
    are deleted (volumes included) and the string "Done" is returned.
    Otherwise *stack* is treated as a stack id and its delete status is
    returned; [] on error. (Copy-pasted "return a list of stacks" docstring
    corrected; `return []` is placed at the except level so the conflict
    branch can no longer fall through to an unbound `stacks` reference.)
    """
    if stack == "all":
        stacks = get_stacks(base_url, token, endpoint_id=endpoint_id, timeout=timeout)
        for s in stacks:
            # Skip stacks that belong to a different endpoint.
            if int(s['EndpointId']) != int(endpoint_id):
                continue
            print("Deleting stack:", s['Name'])
            path = f"/api/stacks/{s['Id']}"
            if endpoint_id is not None:
                path += f"?endpointId={endpoint_id}&removeVolumes=true"
            if args.debug:
                print(path)
            api_delete(base_url, path, token, timeout=timeout)
        return "Done"
    path = f"/api/stacks/{stack}"
    if endpoint_id is not None:
        path += f"?endpointId={endpoint_id}&removeVolumes=true"
    try:
        status = api_delete(base_url, path, token, timeout=timeout)
    except Exception as e:
        if "Conflict for url" in str(e):
            print("Stack with this name may already exist.")
        else:
            print(f"Error deleting stack: {e}")
        return []
    if status is None:
        return []
    return status
def print_stacks(base, token, endpoint=None, endpoints=None):
    """Print a github-style table of stacks (Id, Name, Endpoint name).

    *endpoints* is the lookup dict from resolve_endpoins ({"by_id": ...,
    "by_name": ...}). When *endpoint* is given, only stacks on that
    endpoint are listed. NOTE(review): relies on a module-level
    `basic_stacks` collection defined elsewhere in this file; the `lst` it
    feeds is never used here — presumably a leftover, confirm before
    removing. (Mutable default `endpoints={}` fixed.)
    """
    if endpoints is None:
        endpoints = {}
    stacks = get_stacks(base, token, endpoint)
    count = 0
    lst = []
    data = []
    for stack in stacks:
        if endpoint is not None:
            # Drop stacks on unknown or non-matching endpoints.
            if stack['EndpointId'] not in endpoints['by_id']:
                continue
            if endpoint != stack['EndpointId']:
                continue
        if stack['Name'] not in basic_stacks:
            lst.append(stack['Name'])
        try:
            data.append([stack['Id'], stack['Name'], endpoints['by_id'][stack['EndpointId']]])
        except KeyError:
            # Endpoint id not in the lookup table — show a placeholder.
            data.append([stack['Id'], stack['Name'], "?"])
        count += 1
    headers = ["StackID", "Name", "Endpoint"]
    print(tabulate(data, headers=headers, tablefmt="github"))
    print(f"Total stacks: {count}")
def resolve_endpoins(base, token):
    """Fetch all endpoints and return id<->name lookup tables.

    Returns {"by_id": {id: name}, "by_name": {name: id}}.
    NOTE(review): the *base* argument is immediately shadowed by the
    PORTAINER_URL env default — presumably because callers pass a base that
    already ends in /api; confirm before removing the override.
    """
    base = os.getenv("PORTAINER_URL", "https://portainer.sectorq.eu")
    endpoints = api_get(base, "/api/endpoints", token)
    return {
        "by_id": {ep['Id']: ep['Name'] for ep in endpoints},
        "by_name": {ep['Name']: ep['Id'] for ep in endpoints},
    }
if __name__ == "__main__":
# Example usage: set PORTAINER_USER and PORTAINER_PASS in env, or pass literals below.
base = os.getenv("PORTAINER_URL", "https://portainer.sectorq.eu/api")
@@ -337,470 +100,89 @@ if __name__ == "__main__":
token = portainer_api_key
# Example: list endpoints
por = Portainer(base, token)
if args.delete_stack:
por.delete_stack(args.endpoint_id,args.stack,)
sys.exit()
if args.create_stack:
if args.create_stack:
por.create_stack(args.endpoint_id,args.stack, args.deploy_mode, args.autostart)
sys.exit()
#print(por.base_url)
if args.stop_stack:
por.stop_stack(args.stack,args.endpoint_id)
sys.exit()
#print(por.base_url)
if args.start_stack:
por.start_stack(args.stack,args.endpoint_id)
sys.exit()
#print(por.base_url)
endpoints = resolve_endpoins(base, token)
wl(endpoints)
if is_number(args.endpoint_id):
install_endpoint_id = int(args.endpoint_id)
install_endpoint_name = endpoints['by_id'][install_endpoint_id]
else:
install_endpoint_id = endpoints['by_name'][args.endpoint_id]
install_endpoint_name = args.endpoint_id
wl(install_endpoint_name)
wl(install_endpoint_id)
if args.list_stacks:
print_stacks(base, token, install_endpoint_id,endpoints)
por.print_stacks(args.endpoint_id)
print(json.dumps(por.all_data,indent=2))
sys.exit()
if args.create_stack_new21:
print("Creating new stack from git repo...")
if not args.stack_id:
args.stack_id = input("Stack name? : ")
if install_endpoint_id == None:
install_endpoint_id = endpoints['by_id'][input("Endpoint name? : ")]
if is_number(install_endpoint_id):
install_endpoint_id = int(install_endpoint_id)
else:
install_endpoint_id = endpoints['by_name'][install_endpoint_id]
git_url = "https://gitlab.sectorq.eu/home/docker-compose.git"
git_url = "git@gitlab.sectorq.eu:home/docker-compose.git"
repo_dir = "/tmp/docker-compose"
# Check if folder exists
if os.path.exists(repo_dir):
shutil.rmtree(repo_dir)
print(f"Folder '{repo_dir}' has been removed.")
else:
print(f"Folder '{repo_dir}' does not exist.")
Repo.clone_from(git_url, repo_dir)
if args.list_containers:
print("Getting containers")
por.get_containers(args.endpoint_id,args.stack)
sys.exit()
wl(args.stack_id)
wl(install_endpoint_id)
if args.stack_id == "all":
wl("All stacks selected")
if install_endpoint_name == "nas":
args.stack_id = nas_stacks
elif install_endpoint_name == "rpi5":
print("RPI5 stacks selected")
args.stack_id = rpi5_stacks
elif install_endpoint_name == "rack":
args.stack_id = rack_stacks
else:
args.stack_id = [args.stack_id]
for s in args.stack_id:
print(f"Processing stack: {s}")
if os.path.exists(f"{repo_dir}/{s}/.env"):
f = open(f"{repo_dir}/{s}/.env","r")
env_vars = f.read().splitlines()
envs = []
for ev in env_vars:
if ev.startswith("#") or ev.strip() == "":
continue
if "=" in ev:
name, value = ev.split("=",1)
envs.append({"name": name, "value": value})
f.close()
#wl(envs)
for e in envs:
wl(f"Env: {e['name']} = {e['value']}")
HWS = ["HW_MODE","HW_MODE1","HW_MODE2"]
if e['name'] == "RESTART" and args.endpoint_id == "m-server":
e['value'] = "always"
if e['name'] in HWS:
wl("Found HW_MODE env var.")
if args.gpu:
e['value'] = "hw"
else:
e['value'] = "cpu"
if e['name'] == "LOGGING":
wl("Found LOGGING env var.")
if args.gpu:
e['value'] = "journald"
else:
e['value'] = "syslog"
req = {
"Name": s,
"Env": envs,
"AdditionalFiles": [],
"AutoUpdate": None,
"repositoryURL": "https://gitlab.sectorq.eu/home/docker-compose.git",
"ReferenceName": "refs/heads/main",
"composeFile": f"{s}/docker-compose.yml",
"ConfigFilePath": f"{s}/docker-compose.yml",
"repositoryAuthentication": True,
"repositoryUsername": "jaydee",
"repositoryPassword": "glpat-uj-n-eEfTY398PE4vKSS",
"AuthorizationType": 0,
"TLSSkipVerify": False,
"supportRelativePath": True,
"repositoryAuthentication": True,
"fromAppTemplate": False,
"registries": [6,3],
"FromAppTemplate": False,
"Namespace": "",
"CreatedByUserId": "",
"Webhook": "",
"filesystemPath": "/share/docker_data/portainer/portainer-data/",
"RegistryID": 4,
"isDetachedFromGit": True
}
#wl(json.dumps(req, indent=2))
create_stack(base, token, install_endpoint_id, data=req)
if not args.autostart and s != "pihole":
tries = 0
while True:
try:
stck2 = get_stack(base, s, token, endpoint_id=install_endpoint_id)
break
except Exception as e:
print(f"Waiting for stack {s} to be created...")
time.sleep(10)
tries += 1
if tries > 20:
print(f"Error retrieving stack {s} after creation: {e}")
break
try:
print(f"Stopping stack: ID {stck2['Id']}, Name: {stck2['Name']}")
stop_stack(base, token, install_endpoint_id, stck2['Id'])
except Exception as e:
print(f"Error stopping stack {s}: {e}")
if args.update_stack:
print("Updating stacks")
autostart=True if args.autostart else False
por.update_stack(args.endpoint_id,args.stack,autostart)
sys.exit()
if args.print_all_data:
print(json.dumps(por.all_data,indent=2))
sys.exit()
if args.update_status:
por.update_status(args.endpoint_id,args.stack)
sys.exit()
if args.list_endpoints:
eps = por.get_endpoints()
data = []
for i in eps["by_id"]:
data.append([i,eps["by_id"][i]])
headers = ["EndpointId", "Name"]
print(tabulate(data, headers=headers, tablefmt="github"))
if args.create_stack_new2:
print("Creating new stack from file...")
if not args.stack_id:
args.stack_id = input("Stack name? : ")
if install_endpoint_id == None:
install_endpoint_id = endpoints['by_id'][input("Endpoint name? : ")]
if is_number(install_endpoint_id):
install_endpoint_id = int(install_endpoint_id)
else:
install_endpoint_id = endpoints['by_name'][install_endpoint_id]
git_url = "https://gitlab.sectorq.eu/home/docker-compose.git"
git_url = "git@gitlab.sectorq.eu:home/docker-compose.git"
repo_dir = "/tmp/docker-compose"
sys.exit()
# Check if folder exists
if os.path.exists(repo_dir):
shutil.rmtree(repo_dir)
print(f"Folder '{repo_dir}' has been removed.")
else:
print(f"Folder '{repo_dir}' does not exist.")
Repo.clone_from(git_url, repo_dir)
wl(args.stack_id)
wl(install_endpoint_id)
if args.stack_id == "all":
wl("All stacks selected")
if install_endpoint_name == "nas":
args.stack_id = nas_stacks
elif install_endpoint_name == "rpi5":
print("RPI5 stacks selected")
args.stack_id = rpi5_stacks
elif install_endpoint_name == "rack":
args.stack_id = rack_stacks
else:
args.stack_id = [args.stack_id]
for s in args.stack_id:
# file = f"/tmp/docker-compose/{s}/docker-compose.yml"
print(f"Processing stack: {s}")
file = {
# ("filename", file_object)
"file": ("docker-compose.yml", open(f"/tmp/docker-compose/{s}/docker-compose.yml", "rb")),
}
print(file)
if os.path.exists(f"{repo_dir}/{s}/.env"):
f = open(f"{repo_dir}/{s}/.env","r")
env_vars = f.read().splitlines()
envs = []
for ev in env_vars:
if ev.startswith("#") or ev.strip() == "":
continue
if "=" in ev:
name, value = ev.split("=",1)
envs.append({"name": name, "value": value})
f.close()
#wl(envs)
for e in envs:
wl(f"Env: {e['name']} = {e['value']}")
HWS = ["HW_MODE","HW_MODE1","HW_MODE2"]
if e['name'] in HWS:
wl("Found HW_MODE env var.")
if args.gpu:
e['value'] = "hw"
else:
e['value'] = "cpu"
if e['name'] == "LOGGING":
wl("Found LOGGING env var.")
if args.gpu:
e['value'] = "journald"
else:
e['value'] = "syslog"
create_stack2(base, token, install_endpoint_id, name=s, file=file, envs=envs)
if not args.autostart and s != "pihole":
tries = 0
while True:
try:
stck2 = get_stack(base, s, token, endpoint_id=install_endpoint_id)
break
except Exception as e:
print(f"Waiting for stack {s} to be created...")
time.sleep(2)
tries += 1
if tries > 5:
print(f"Error retrieving stack {s} after creation: {e}")
break
try:
print(f"Stopping stack: ID {stck2['Id']}, Name: {stck2['Name']}")
stop_stack(base, token, install_endpoint_id, stck2['Id'])
except Exception as e:
print(f"Error stopping stack {s}: {e}")
#print(json.dumps(req, indent=2))
if args.create_stack:
if not args.stack_id:
input("Stack name?")
stck = get_stack(base, args.stack_id, token)
print(f"Found stack: ID {stck['Id']}, Name: {stck['Name']}")
print(json.dumps(stck, indent=2))
for e in stck["Env"]:
print(f"Env: {e['name']} = {e['value']}")
HWS = ["HW_MODE","HW_MODE1","HW_MODE2"]
if e['name'] in HWS:
print("Found HW_MODE env var.")
if args.gpu:
e['value'] = "hw"
else:
e['value'] = "cpu"
if e['name'] == "LOGGING":
print("Found LOGGING env var.")
if args.gpu:
e['value'] = "journald"
else:
e['value'] = "syslog"
#print(json.dumps(stck, indent=2))
uid = str(uuid.uuid4())
try:
stck["AutoUpdate"]["Webhook"] = uid
except:
stck["AutoUpdate"] = None
try:
req = {
"Name": stck["Name"],
"Env": stck["Env"],
"AdditionalFiles": stck["AdditionalFiles"],
"AutoUpdate": stck["AutoUpdate"],
"repositoryURL": stck["GitConfig"]["URL"],
"ReferenceName": "refs/heads/main",
"composeFile": f"{stck['Name']}/docker-compose.yml",
"ConfigFilePath": f"{stck['Name']}/docker-compose.yml",
"repositoryAuthentication": True,
"repositoryUsername": "jaydee",
"repositoryPassword": "glpat-uj-n-eEfTY398PE4vKSS",
"AuthorizationType": 0,
"TLSSkipVerify": False,
"supportRelativePath": True,
"repositoryAuthentication": True,
"fromAppTemplate": False,
"registries": [6,3],
"FromAppTemplate": False,
"Namespace": "",
"CreatedByUserId": "",
"Webhook": "",
"filesystemPath": "/share/docker_data/portainer/portainer-data/stacks",
"RegistryID": 4
}
except:
req = {
"Name": stck["Name"],
"Env": stck["Env"],
"AdditionalFiles": stck["AdditionalFiles"],
"AutoUpdate": None,
"repositoryURL": "https://gitlab.sectorq.eu/home/docker-compose.git",
"ReferenceName": "refs/heads/main",
"composeFile": f"{stck['Name']}/docker-compose.yml",
"ConfigFilePath": f"{stck['Name']}/docker-compose.yml",
"repositoryAuthentication": True,
"repositoryUsername": "jaydee",
"repositoryPassword": "glpat-uj-n-eEfTY398PE4vKSS",
"AuthorizationType": 0,
"TLSSkipVerify": False,
"supportRelativePath": True,
"repositoryAuthentication": True,
"fromAppTemplate": False,
"registries": [6,3],
"FromAppTemplate": False,
"Namespace": "",
"CreatedByUserId": "",
"Webhook": "",
"filesystemPath": "/share/docker_data/portainer/portainer-data/stacks",
"RegistryID": 6
}
print(json.dumps(req, indent=2))
create_stack(base, token, install_endpoint_id, data=req)
if args.create_stacks:
if args.endpoint_id == "nas":
s = nas_stacks
elif args.endpoint_id == "rpi5":
s = rpi5_stacks
elif args.endpoint_id == "rack":
s = rack_stacks
wl(s)
for ns in s:
wl(f"Processing stack: {ns}")
stck = get_stack(base, ns, token)
print(f"Found stack: ID {stck['Id']}, Name: {stck['Name']}")
#print(json.dumps(stck, indent=2))
for e in stck["Env"]:
#print(f"Env: {e['name']} = {e['value']}")
if e['name'] == "RESTART" and stck["Env"] == install_endpoint_id:
e['value'] = "always"
HWS = ["HW_MODE","HW_MODE1","HW_MODE2"]
if e['name'] in HWS:
print("Found HW_MODE env var.")
if args.gpu:
e['value'] = "hw"
else:
e['value'] = "cpu"
if e['name'] == "LOGGING":
print("Found LOGGING env var.")
if args.gpu:
e['value'] = "journald"
else:
e['value'] = "syslog"
#print(stck["Env"])
uid = str(uuid.uuid4())
try:
stck["AutoUpdate"]["Webhook"] = uid
except:
stck["AutoUpdate"] = None
try:
req = {
"Name": stck["Name"],
"Env": stck["Env"],
"AdditionalFiles": stck["AdditionalFiles"],
"AutoUpdate": stck["AutoUpdate"],
"repositoryURL": stck["GitConfig"]["URL"],
"ReferenceName": "refs/heads/main",
"composeFile": f"{stck['Name']}/docker-compose.yml",
"ConfigFilePath": f"{stck['Name']}/docker-compose.yml",
"repositoryAuthentication": True,
"repositoryUsername": "jaydee",
"repositoryPassword": "glpat-uj-n-eEfTY398PE4vKSS",
"AuthorizationType": 0,
"TLSSkipVerify": False,
"supportRelativePath": True,
"repositoryAuthentication": True,
"fromAppTemplate": False,
"registries": [6,3],
"FromAppTemplate": False,
"Namespace": "",
"CreatedByUserId": "",
"Webhook": "",
"filesystemPath": "/share/docker_data/portainer/portainer-data/stacks",
"RegistryID": 4
}
except:
req = {
"Name": stck["Name"],
"Env": stck["Env"],
"AdditionalFiles": stck["AdditionalFiles"],
"AutoUpdate": None,
"repositoryURL": "https://gitlab.sectorq.eu/home/docker-compose.git",
"ReferenceName": "refs/heads/main",
"composeFile": f"{stck['Name']}/docker-compose.yml",
"ConfigFilePath": f"{stck['Name']}/docker-compose.yml",
"repositoryAuthentication": True,
"repositoryUsername": "jaydee",
"repositoryPassword": "glpat-uj-n-eEfTY398PE4vKSS",
"AuthorizationType": 0,
"TLSSkipVerify": False,
"supportRelativePath": True,
"repositoryAuthentication": True,
"fromAppTemplate": False,
"registries": [6,3],
"FromAppTemplate": False,
"Namespace": "",
"CreatedByUserId": "",
"Webhook": "",
"filesystemPath": "/share/docker_data/portainer/portainer-data/stacks",
"RegistryID": 6
}
print(f"Creating stack: {ns}")
create_stack(base, token, install_endpoint_id, data=req)
if not args.autostart:
stck2 = get_stack(base, ns, token, endpoint_id=install_endpoint_id)
print(print_stacks(base, token, install_endpoint_id,endpoints))
print(f"Stopping stack: ID {stck2['Id']}, Name: {stck2['Name']}")
stop_stack(base, token, install_endpoint_id, stck2['Id'])
if args.delete_stack22:
print(f"Delete stack {args.stack_id}")
if not is_number(args.stack_id) and args.stack_id != "all":
args.stack_id = get_stack(base, args.stack_id, token, install_endpoint_id)['Id']
#print(args.stack_id)
#print(install_endpoint_id)
if args.stop_containers:
if por.all_data["endpoints_status"][args.endpoint_id] != 1:
print(f"Endpoint {por.get_endpoint_name(args.endpoint_id)} is offline")
sys.exit()
print(f"Stopping containers on {por.get_endpoint_name(args.endpoint_id)}")
cont = []
for c in por.all_data["containers"][args.endpoint_id]:
if args.stack == c or args.stack == "all":
cont+=por.all_data["containers"][args.endpoint_id][c]
por.stop_containers(args.endpoint_id,cont)
sys.exit()
delete_stack(base, token, install_endpoint_id, args.stack_id)
if args.stop_stack:
if args.stack_id == "all":
print("Stopping all stacks...")
stcks = get_stacks(base, token, endpoint_id=install_endpoint_id)
# stcks = get_stack(base, sta, token, endpoint_id=install_endpoint_id)
else:
stcks = [get_stack(base, args.stack_id, token, endpoint_id=install_endpoint_id)]
for stck in stcks:
print(f"Stopping stack {stck['Name']}")
stop_stack(base, token, install_endpoint_id, stck['Id'])
if args.start_stack:
if args.stack_id == "all":
print("Starting all stacks...")
stcks = get_stacks(base, token, endpoint_id=install_endpoint_id)
# stcks = get_stack(base, sta, token, endpoint_id=install_endpoint_id)
else:
stcks = [get_stack(base, args.stack_id, token, endpoint_id=install_endpoint_id)]
for stck in stcks:
print(f"Starting stack {stck['Name']}")
start_stack(base, token, install_endpoint_id, stck['Id'])
if args.start_containers:
print("Starting containers")
cont = []
#input(json.dumps(por.all_data,indent=2))
for c in por.all_data["containers"][args.endpoint_id]:
if args.stack == c or args.stack == "all":
cont+=por.all_data["containers"][args.endpoint_id][c]
por.start_containers(args.endpoint_id,cont)
sys.exit()
if args.start_containers:
print("Starting containers")
cont = []
#input(json.dumps(por.all_data,indent=2))
for c in por.all_data["containers"][args.endpoint_id]:
if args.stack == c or args.stack == "all":
cont+=por.all_data["containers"][args.endpoint_id][c]
por.start_containers(args.endpoint_id,cont)
sys.exit()
if args.refresh_environment:
cont = por.refresh()
sys.exit()
if args.refresh_status:
if args.stack_id == "all":
print("Stopping all stacks...")