Compare commits

...

9 Commits

SHA1 Message Date
2ffed0ee15 build 2025-12-07 22:00:01 +01:00
227294e19c build 2025-12-02 23:50:18 +01:00
8e543f838a build 2025-12-02 00:45:25 +01:00
b462d4307f klal 2025-12-02 00:23:57 +01:00
dab586d019 klal 2025-12-01 20:12:43 +01:00
8c58dbeb29 build 2025-12-01 19:31:09 +01:00
0169786938 klal 2025-12-01 09:41:27 +01:00
ccfe79cb57 klal 2025-11-30 18:01:00 +01:00
82dc871df4 klal 2025-11-26 23:03:24 +01:00
11 changed files with 220 additions and 179 deletions

View File

@@ -44,9 +44,9 @@
- name: zabbix-agent
role: zabbix-agent
tags: zabbix-agent
- name: autofs_client
role: autofs_client
tags: autofs_client
- name: autofs
role: autofs
tags: autofs
- name: ldap_client
role: ldap_client
tags: ldap_client
@@ -71,9 +71,6 @@
- name: sudoers
role: sudoers
tags: sudoers
- name: docker_packages
role: docker_packages
tags: docker_packages
- name: watcher
role: watcher
tags: watcher

View File

@@ -1,153 +0,0 @@
---
datacenter:
children:
odroid_cluster:
children:
odroid_master:
hosts:
192.168.77.131:
vars:
testVar: 999
odroid_worker:
hosts:
192.168.77.13[2:5]:
vars:
ansible_ssh_user: jd
ansible_ssh_pass: lacijaydee
ansible_become_password: lacijaydee
ssh_args: "-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
ansible_ssh_common_args: "-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
localhost1:
hosts:
localhost
vars:
ansible_user: root
morefine:
hosts:
192.168.77.12:
vars:
jaydee_install_mqtt_srv: true
ansible_python_interpreter: auto_silent
ansible_ssh_user: jd
ansible_become_user: root
ansible_become_password: q
ansible_ssh_common_args: "-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
ryzen:
hosts:
192.168.77.15:
vars:
ansible_python_interpreter: auto_silent
ansible_ssh_user: root
ansible_ssh_pass: lacijaydee
ansible_password: lacijaydee
ansible_become_user: root
ansible_become_password: lacijaydee
omv:
hosts:
192.168.77.189:
vars:
ansible_user: root
ansible_password: lacijaydee
ansible_ssh_user: root
ansible_ssh_pass: lacijaydee
ansible_become_user: root
ansible_become_password: lacijaydee
amd:
hosts:
192.168.77.4:
vars:
ansible_user: root
ansible_password: l4c1j4yd33Du5lo
ansible_ssh_user: root
ansible_ssh_pass: l4c1j4yd33Du5lo
ansible_become_user: root
ansible_become_password: l4c1j4yd33Du5lo
rhasspy:
hosts:
192.168.77.224
vars:
ansible_user: jd
ansible_ssh_pass: q
ansible_become_password: l4c1j4yd33Du5lo
windows:
hosts:
192.168.77.211
vars:
ansible_user: jd
ansible_password: "q"
ansible_connection: winrm
ansible_port: 5985
ansible_winrm_server_cert_validation: ignore
ansible_winrm_kerberos_delegation: true
mqtt_srv:
children:
servers:
hosts:
rpi5-1.home.lan:
rpi5.home.lan:
omv.home.lan:
rack.home.lan:
m-server.home.lan:
zabbix.home.lan:
192.168.77.101:
vars:
ansible_python_interpreter: /usr/bin/python3
ansible_ssh_user: jd
ansible_become_password: l4c1j4yd33Du5lo
ansible_ssh_private_key_file: ssh_key.pem
identity_file: ssh_key.pem
nas:
hosts:
nas.home.lan:
vars:
ansible_ssh_user: admin
become_method: su
become_user: admin
ansible_ssh_private_key_file: ssh_key.pem
# ansible_user: admin
# ansible_pass: l4c1!j4yd33?Du5lo1
ansible_python_interpreter: /share/ZFS530_DATA/.qpkg/QPython312/bin/python3
desktop:
hosts:
morefine.home.lan:
vars:
ansible_ssh_user: jd
ansible_become_user: root
ansible_become_password: q
# ansible_ssh_password: q
ansible_ssh_private_key_file: ssh_key.pem
containers:
children:
servers:
hosts:
rpi5-1.home.lan:
rpi5.home.lan:
m-server.home.lan:
fog.home.lan:
zabbix.home.lan:
omv.home.lan:
192.168.77.101:
vars:
ansible_python_interpreter: /usr/bin/python3
ansible_ssh_user: jd
# ansible_ssh_password: l4c1j4yd33Du5lo
ansible_become_password: l4c1j4yd33Du5lo
ansible_ssh_private_key_file: ssh_key.pem
identity_file: ssh_key.pem
ansible_ssh_pass: l4c1j4yd33Du5lo
nas:
hosts:
nas.home.lan:
192.168.77.106:
vars:
ansible_ssh_user: admin
become_method: su
become_user: admin
ansible_ssh_private_key_file: ssh_key.pem
# ansible_user: admin
# ansible_pass: l4c1!j4yd33?Du5lo1
ansible_python_interpreter: /share/ZFS530_DATA/.qpkg/QPython312/bin/python3

requirements.txt Normal file (10 lines)
View File

@@ -0,0 +1,10 @@
ansible-core==2.20.0
ansible-lint==25.11.1
# YAML libs
PyYAML>=6.0.2
ruamel.yaml==0.18.16
ruamel.yaml.clib==0.2.15
jinja2==3.1.6
ansible-compat==25.11.0
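
These pins cover the Python toolchain on the Ansible control node. A minimal sketch of consuming the file in a local virtualenv follows (the play, the .venv path, and the localhost target are illustrative assumptions, not part of this change):

# Hypothetical control-node bootstrap play; assumes the repository
# (and this requirements.txt) is already checked out locally.
- hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: Install the pinned dependencies into a virtualenv
      ansible.builtin.pip:
        requirements: "{{ playbook_dir }}/requirements.txt"
        virtualenv: "{{ playbook_dir }}/.venv"
        virtualenv_command: python3 -m venv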

View File

@@ -26,6 +26,9 @@
content: |
docker_data -fstype=nfs m-server.home.lan:/docker_data
downloads -fstype=nfs m-server.home.lan:/downloads
movies -fstype=nfs m-server.home.lan:/movies
shows -fstype=nfs m-server.home.lan:/shows
music -fstype=nfs m-server.home.lan:/music
mode: '0600'
owner: root
group: root
@@ -150,6 +153,7 @@
shows --fstype=nfs,rw nas.home.lan:/shows
xxx --fstype=nfs,rw nas.home.lan:/xxx
proxmox --fstype=nfs,rw nas.home.lan:/proxmox
live --fstype=nfs,rw nas.home.lan:/live
mode: '0600'
owner: root
group: root

View File

@@ -18,7 +18,7 @@
apt-mark hold docker-ce docker-compose-plugin docker-ce-rootless-extras docker-ce-cli docker-buildx-plugin
register: logo
changed_when: "logo.rc == 0"
ignore_errors: true
when: inventory_hostname != 'morefine.home.lan'
- name: Upgrade the full OS
ansible.builtin.apt:
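
The hold above shells out to apt-mark; a hedged, idempotent alternative using the built-in module would look roughly like this (package list copied from the command above, task placement is an assumption):

- name: Hold Docker packages via dpkg selections (illustrative alternative to apt-mark)
  ansible.builtin.dpkg_selections:
    name: "{{ item }}"
    selection: hold
  loop:
    - docker-ce
    - docker-compose-plugin
    - docker-ce-rootless-extras
    - docker-ce-cli
    - docker-buildx-plugin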

View File

@@ -18,9 +18,10 @@
- python3-dev
state: present
update_cache: true
retries: 5
delay: 10
until: result is succeeded
register: install_docker_deps
until: install_docker_deps is succeeded
retries: 10
delay: 10
- name: Get keys for raspotify
ansible.builtin.command:
@@ -213,6 +214,7 @@
path: /etc/systemd/system/docker.service.d/
state: directory
mode: '0755'
- name: Create a directory for certs
ansible.builtin.file:
path: /etc/docker/certs
@@ -274,6 +276,41 @@
ansible.builtin.systemd:
daemon_reload: true
- name: Check if file exists
ansible.builtin.stat:
path: /etc/docker/certs/ca.pem
register: file_check
- name: Print file check result
ansible.builtin.debug:
var: file_check
- name: Include role only if missing
ansible.builtin.include_role:
name: cert_gen
when: not file_check.stat.exists and mode == "cert"
- name: Create docker config file
ansible.builtin.copy:
dest: /etc/docker/daemon.json
content: |
{
"log-driver": "json-file",
"log-opts": {
"max-size": "10m",
"max-file": "3"
},
"data-root": "/var/lib/docker",
"dns": ["192.168.77.101", "192.168.77.106", "8.8.8.8"],
"dns-search": ["lan", "home.lan"]
}
mode: '0644'
owner: root
group: root
- name: Restart docker service
ansible.builtin.service:
name: docker
@@ -283,6 +320,6 @@
# ansible.builtin.shell: docker plugin install grafana/loki-docker-driver:3.3.2-{{ ansible_architecture }} --alias loki --grant-all-permissions
- name: Install a plugin
community.docker.docker_plugin:
plugin_name: grafana/loki-docker-driver:3.3.2
plugin_name: grafana/loki-docker-driver
alias: loki
state: present
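
If the untagged plugin name is the new value here, Docker resolves it to the plugin's latest tag at install time. Should the plugin ever end up installed but disabled, the same module can enable it explicitly; a small sketch (task name and placement are assumptions, not part of this change):

- name: Ensure the loki logging driver plugin is enabled (illustrative follow-up)
  community.docker.docker_plugin:
    plugin_name: grafana/loki-docker-driver
    alias: loki
    state: enable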

View File

@@ -7,5 +7,6 @@
loop_control:
loop_var: roles_item
loop:
- cert_gen
- docker
- docker_swarm
- autofs

View File

@@ -48,44 +48,65 @@
fstype: none
when: inventory_hostname != 'amd.home.lan'
- name: Reconfigure nfs fstab
- name: Reconfigure nfs exports
ansible.builtin.lineinfile:
path: /etc/fstab
regexp: "^/media/m-server/downloads .*"
line: "/media/m-server/downloads /srv/nfs/downloads none bind 0 0"
regexp: "^/share/{{ volume }} .*"
line: "/media/m-server/{{ volume }} /srv/nfs/{{ volume }} none bind 0 0"
when: inventory_hostname == 'm-server.home.lan'
loop_control:
loop_var: volume
loop:
- downloads
- music
- movies
- shows
- name: Reconfigure nfs exports
ansible.builtin.lineinfile:
path: /etc/fstab
regexp: "^/share/docker_data .*"
line: "/share/docker_data /srv/nfs/docker_data none bind 0 0"
regexp: "^/share/{{ volume }} .*"
line: "/share/{{ volume }} /srv/nfs/{{ volume }} none bind 0 0"
when: inventory_hostname == 'm-server.home.lan'
loop_control:
loop_var: volume
loop:
- docker_data
- name: Reconfigure nfs exports
ansible.builtin.lineinfile:
path: /etc/exports
regexp: "^/srv/nfs .*"
line: "/srv/nfs 192.168.77.0/24(rw,sync,no_subtree_check,crossmnt,fsid=0)"
line: "/srv/nfs 192.168.77.0/24(rw,sync,no_subtree_check,crossmnt,fsid=0) 192.168.80.0/24(rw,sync,no_subtree_check,crossmnt,fsid=0)"
- name: Reconfigure nfs exports
ansible.builtin.lineinfile:
path: /etc/exports
regexp: "^/srv/nfs/docker_data .*"
line: "/srv/nfs/docker_data 192.168.77.0/24(rw,sync,no_subtree_check)"
line: "/srv/nfs/docker_data 192.168.77.0/24(rw,sync,no_subtree_check) 192.168.80.0/24(rw,sync,no_subtree_check)"
when: inventory_hostname != 'amd.home.lan'
- name: Reconfigure nfs exports
ansible.builtin.lineinfile:
path: /etc/exports
regexp: "^/srv/nfs/downloads .*"
line: "/srv/nfs/downloads 192.168.77.0/24(rw,sync,no_subtree_check)"
regexp: "^/srv/nfs/{{ volume }} .*"
line: "/srv/nfs/{{ volume }} 192.168.77.0/24(rw,sync,no_subtree_check) 192.168.80.0/24(rw,sync,no_subtree_check)"
when: inventory_hostname == 'm-server.home.lan'
loop_control:
loop_var: volume
loop:
- downloads
- music
- movies
- shows
- name: Reconfigure nfs exports
ansible.builtin.lineinfile:
path: /etc/exports
regexp: "^/srv/nfs/backup .*"
line: "/srv/nfs/backup 192.168.77.0/24(rw,sync,no_subtree_check)"
line: "/srv/nfs/backup 192.168.77.0/24(rw,sync,no_subtree_check) 192.168.80.0/24(rw,sync,no_subtree_check)"
when: inventory_hostname == 'amd.home.lan'
- name: Restart nfs service
ansible.builtin.service:
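
Restarting the NFS server makes it re-read /etc/exports; the export edits above could also be applied without a full restart by re-exporting, as in this minimal sketch (task placement is an assumption, not part of this change):

- name: Re-export all NFS shares after editing /etc/exports (illustrative alternative)
  ansible.builtin.command: exportfs -ra
  changed_when: false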

roles/proxmox/tasks/main.yml Executable file (127 lines)
View File

@@ -0,0 +1,127 @@
- name: Omv Setup
become: "{{ false if inventory_hostname == 'nas.home.lan' else true }}"
block:
# - name: Gather facts
# ansible.builtin.setup:
# - name: Print
# ansible.builtin.debug:
# msg: "{{ ansible_facts }}"
- name: Include vault
ansible.builtin.include_vars:
file: jaydee.yml
name: mysecrets
when: inventory_hostname != 'nas.home.lan'
- name: Delete content & directory
ansible.builtin.file:
state: absent
path: "{{ dest_folder }}"
- name: "Check if listed package is installed or not on Debian Linux family"
ansible.builtin.package:
name: "{{ item }}"
state: present
check_mode: true
loop:
- git
register: git_installed
- name: Include role only if missing
ansible.builtin.include_role:
name: git
when: not git_installed
- name: Pull repo
tags:
- git_pull
ansible.builtin.git:
repo: "https://{{ mysecrets['git_user'] | urlencode }}:{{ mysecrets['git_password_mqtt'] | urlencode }}@gitlab.sectorq.eu/jaydee/proxmox.git"
dest: "{{ dest_folder }}"
update: true
clone: true
version: main
when: inventory_hostname != 'nas.home.lan'
- name: Print
ansible.builtin.debug:
msg: "{{ inventory_hostname }}"
- name: Create a directory if it does not exist
ansible.builtin.file:
path: /myapps/proxmox
state: directory
mode: '0755'
owner: root
group: root
when: inventory_hostname != 'nas.home.lan'
- name: Upload script
ansible.builtin.copy:
src: "{{ dest_folder }}/proxmox.py"
dest: /myapps/proxmox/proxmox.py
remote_src: true
mode: '0755'
owner: root
group: root
when: inventory_hostname != 'nas.home.lan'
- name: Upload exclude file
ansible.builtin.copy:
src: "{{ dest_folder }}/exclude.txt"
dest: /myapps/exclude.txt
remote_src: true
mode: '0755'
owner: root
group: root
when: inventory_hostname != 'nas.home.lan'
- name: Upload requirements
ansible.builtin.copy:
src: "{{ dest_folder }}/requirements.txt"
dest: /myapps/requirements.txt
remote_src: true
mode: '0755'
owner: root
group: root
when: inventory_hostname != 'nas.home.lan'
- name: Upload docker_backups.py
ansible.builtin.copy:
src: "{{ dest_folder }}/docker_backups.py"
dest: /myapps/docker_backups.py
remote_src: true
mode: '0755'
owner: root
group: root
when: inventory_hostname == 'm-server.home.lan'
- name: Install venv
ansible.builtin.apt:
name:
- python3-virtualenv
- rsync
- name: Install specified python requirements in indicated (virtualenv)
ansible.builtin.pip:
requirements: /myapps/requirements.txt
virtualenv: /myapps/venv
- name: 'Ensure an old job is no longer present. Removes any job that is prefixed by "#Ansible: an old job" from the crontab'
ansible.builtin.cron:
name: "omv_backup"
state: absent
- name: Upload service config
ansible.builtin.copy:
src: omv_backup.service
dest: /etc/systemd/system/omv_backup.service
mode: '0755'
owner: root
group: root
when: inventory_hostname == 'amd.home.lan'
- name: Restart omv service
ansible.builtin.service:
name: omv_backup
state: restarted
daemon_reload: true
enabled: true
when: inventory_hostname == 'amd.home.lan'
# - name: Ensure a job that runs at 2 and 5 exists. Creates an entry like "0 5,2 * * ls -alh > /dev/null"
# ansible.builtin.cron:
# name: "omv_backup"
# minute: "0"
# hour: "8"
# job: "sudo /myapps/omv_backup.py -b > /dev/null 2>&1 &"
# state: present

roles/proxmox/vars/main.yml Executable file (1 line)
View File

@@ -0,0 +1 @@
dest_folder: "/tmp/ans_repo"

View File

@@ -1,4 +0,0 @@
---
- hosts: datacenter
roles:
- wazuh-agent