Compare commits

...

305 Commits

Author SHA1 Message Date
67b908b5ad build 2025-12-14 10:22:19 +01:00
2a2aa81e5f build 2025-12-14 10:08:30 +01:00
b6d6c62071 build 2025-12-14 10:05:40 +01:00
cc0df08051 build 2025-12-14 02:06:26 +01:00
bf2772103a build 2025-12-14 02:05:47 +01:00
e23498073d build 2025-12-14 02:04:35 +01:00
a75e61ef72 build 2025-12-14 02:04:03 +01:00
d0d3a56a7c build 2025-12-14 02:03:02 +01:00
c6d44a83e2 build 2025-12-14 01:50:14 +01:00
91d00911e5 build 2025-12-14 01:19:08 +01:00
9594626646 build 2025-12-13 21:26:29 +01:00
e96c3015fb build 2025-12-13 15:17:49 +01:00
5b340d6b3d build 2025-12-12 18:10:59 +01:00
94c59d85fc build 2025-12-12 18:07:34 +01:00
f78408484b build 2025-12-12 16:42:51 +01:00
42adb45e9f build 2025-12-12 16:38:53 +01:00
e986f77d26 build 2025-12-12 16:32:23 +01:00
4e1dcb5009 build 2025-12-12 16:25:04 +01:00
4f7c3a285c build 2025-12-12 16:23:39 +01:00
62cab99470 build 2025-12-12 16:22:47 +01:00
1bc1b0dcb9 build 2025-12-12 16:19:03 +01:00
b37bf9f699 build 2025-12-12 16:02:28 +01:00
bed32a51a6 build 2025-12-12 16:00:49 +01:00
cf04cb7f74 build 2025-12-12 15:57:16 +01:00
c835e3fbea build 2025-12-12 15:55:48 +01:00
6affd9f1a4 build 2025-12-12 15:53:56 +01:00
9a064b469e build 2025-12-12 15:23:20 +01:00
c206590073 build 2025-12-12 15:18:18 +01:00
565b25dc9b build 2025-12-12 12:16:10 +01:00
24179fade8 build 2025-12-11 17:12:09 +01:00
1ed48b84c4 build 2025-12-11 16:45:19 +01:00
2cf269a868 build 2025-12-10 23:42:15 +01:00
c01c495b41 build 2025-12-10 23:34:38 +01:00
e2748ccda8 build 2025-12-10 23:33:15 +01:00
a4eb29de87 build 2025-12-10 23:25:37 +01:00
005616c0a6 Merge branch 'main' of gitlab.sectorq.eu:home/docker-compose 2025-12-10 23:24:13 +01:00
74213b1de9 build 2025-12-10 23:23:26 +01:00
ladislav.dusa f3de9e0995 build 2025-12-09 09:21:14 +01:00
ladislav.dusa f82f7e0f25 build 2025-12-09 09:20:58 +01:00
a98a226099 build 2025-12-08 18:57:14 +01:00
f6536c0e6c build 2025-12-08 18:53:19 +01:00
b73b3dc929 build 2025-12-08 18:15:05 +01:00
4e8fe0a1a1 build 2025-12-08 18:14:12 +01:00
6484de18e8 build 2025-12-08 18:00:55 +01:00
a92d32e73b build 2025-12-08 16:51:22 +01:00
178f30e9f1 build 2025-12-08 16:49:06 +01:00
121abc76b0 build 2025-12-08 16:48:20 +01:00
62a3ba833f build 2025-12-08 16:47:44 +01:00
cd2dc573f4 build 2025-12-08 16:46:24 +01:00
7b6229bb39 build 2025-12-08 16:41:58 +01:00
f06bcd22a6 build 2025-12-08 16:41:10 +01:00
fa00fadccf build 2025-12-08 16:06:47 +01:00
4537da6174 build 2025-12-08 15:21:02 +01:00
020b784632 build 2025-12-08 15:20:18 +01:00
2963ee88f9 build 2025-12-08 15:08:41 +01:00
682a727d50 build 2025-12-08 14:02:49 +01:00
fca4bb4508 build 2025-12-08 00:02:56 +01:00
8f2d400301 build 2025-12-07 23:25:42 +01:00
bc41a0431d build 2025-12-07 23:22:52 +01:00
838fe4ed2a build 2025-12-07 23:21:54 +01:00
d932db7b28 build 2025-12-07 23:18:19 +01:00
abaf235fed build 2025-12-07 21:13:21 +01:00
914bd21457 build 2025-12-07 21:04:38 +01:00
0a746ab637 build 2025-12-07 21:01:33 +01:00
3191ca5b55 build 2025-12-07 21:00:52 +01:00
5a2235ae06 build 2025-12-07 20:59:57 +01:00
4f212e10f2 build 2025-12-07 20:54:15 +01:00
fc0ea98def build 2025-12-07 20:47:22 +01:00
0d9b5ef975 build 2025-12-07 20:44:01 +01:00
4ce8f2ec9f build 2025-12-07 20:40:26 +01:00
5f24941a59 build 2025-12-07 20:32:19 +01:00
fe563de936 build 2025-12-07 20:17:21 +01:00
324244afac build 2025-12-07 20:06:38 +01:00
106e53fd0e build 2025-12-07 20:05:50 +01:00
ecdbbfb69f build 2025-12-07 20:00:33 +01:00
eb676c9dc6 build 2025-12-07 13:24:10 +01:00
139908ad56 build 2025-12-07 13:19:02 +01:00
46a0ae669c build 2025-12-07 13:16:00 +01:00
4708260c5c build 2025-12-07 13:11:24 +01:00
8fedeb155a build 2025-12-07 13:05:41 +01:00
bd29257d19 build 2025-12-07 13:04:52 +01:00
f5628ee014 build 2025-12-07 12:55:51 +01:00
b70c372e07 build 2025-12-06 16:38:51 +01:00
dcee21d9d5 build 2025-12-06 16:36:27 +01:00
5748b06dee build 2025-12-06 16:32:49 +01:00
042a50b572 build 2025-12-06 16:27:33 +01:00
80781812de build 2025-12-06 16:26:01 +01:00
718dd27570 build 2025-12-06 16:22:19 +01:00
d2e18a53e6 build 2025-12-06 16:20:53 +01:00
7a5291fb37 build 2025-12-06 16:19:30 +01:00
5651b03906 build 2025-12-06 16:18:26 +01:00
246dc997b5 build 2025-12-06 16:17:27 +01:00
81b430e23d build 2025-12-06 16:15:23 +01:00
43b82acc66 build 2025-12-06 16:12:53 +01:00
7b95fd74cd build 2025-12-06 16:10:40 +01:00
403f9396c5 build 2025-12-06 16:08:57 +01:00
1f82d674f8 build 2025-12-06 16:06:47 +01:00
6f66064c8b build 2025-12-06 16:05:02 +01:00
504eb1229a build 2025-12-06 16:01:52 +01:00
c5c66dc914 build 2025-12-06 15:59:44 +01:00
b7efad7a12 build 2025-12-06 15:55:55 +01:00
4d98f7f39e build 2025-12-06 15:53:02 +01:00
f449431a91 build 2025-12-06 15:51:42 +01:00
8f36939703 build 2025-12-06 15:48:47 +01:00
136e637aed build 2025-12-06 15:46:41 +01:00
6ba2c2dd0f build 2025-12-06 15:27:44 +01:00
b4961f5961 build 2025-12-06 15:14:25 +01:00
e34addb608 build 2025-12-06 15:11:35 +01:00
89321a9b46 build 2025-12-06 15:10:34 +01:00
0e31d85bc7 build 2025-12-06 13:25:29 +01:00
68046f8bb3 build 2025-12-06 02:22:16 +01:00
cc3b725b59 build 2025-12-06 02:19:58 +01:00
7af37e4adf build 2025-12-06 02:18:09 +01:00
59276f0a0d build 2025-12-06 02:13:52 +01:00
53a2087e21 build 2025-12-06 01:53:54 +01:00
c4a5822ee7 build 2025-12-06 01:50:44 +01:00
6c65d61a9d build 2025-12-06 01:17:11 +01:00
a3661ef551 build 2025-12-06 01:14:42 +01:00
99c4787189 build 2025-12-06 01:10:29 +01:00
7ee17ad41c build 2025-12-06 01:05:53 +01:00
f414ced56e build 2025-12-06 01:04:34 +01:00
43966299d7 build 2025-12-06 01:01:07 +01:00
df897ceee3 build 2025-12-06 00:59:20 +01:00
4c73d641e8 build 2025-12-06 00:55:48 +01:00
34cb6e28d6 build 2025-12-06 00:54:38 +01:00
690d1a2a15 build 2025-12-06 00:47:35 +01:00
cdd64767f4 build 2025-12-06 00:33:08 +01:00
9080a8914c build 2025-12-06 00:30:40 +01:00
a428b50ecd build 2025-12-06 00:29:45 +01:00
630946ad99 build 2025-12-06 00:20:51 +01:00
0fc0f74183 build 2025-12-06 00:14:24 +01:00
bab3fd7ec3 build 2025-12-06 00:11:34 +01:00
116855a14c build 2025-12-06 00:07:08 +01:00
db91ebd75e build 2025-12-06 00:03:52 +01:00
914ceee33c build 2025-12-05 23:45:12 +01:00
b784bc08e9 build 2025-12-05 23:31:00 +01:00
3eb5938c7d build 2025-12-05 23:28:27 +01:00
05b09e3ab4 build 2025-12-05 23:27:54 +01:00
b1336936a7 build 2025-12-05 23:23:16 +01:00
e3e59b73c6 build 2025-12-05 23:15:16 +01:00
9c8f603390 build 2025-12-05 23:14:25 +01:00
e77462a602 build 2025-12-05 23:05:40 +01:00
3b3a520099 build 2025-12-05 23:02:56 +01:00
b5ad9aa6f1 build 2025-12-05 22:54:34 +01:00
68fe8d68bf build 2025-12-05 22:49:10 +01:00
3fad5abfb2 build 2025-12-05 22:46:00 +01:00
279d51b43e build 2025-12-05 22:36:47 +01:00
782b2361b1 build 2025-12-05 22:35:09 +01:00
ca167b83a1 build 2025-12-05 22:34:31 +01:00
1c7d250719 build 2025-12-05 22:27:08 +01:00
3fd77c7a85 build 2025-12-05 22:02:54 +01:00
c4de5186ef build 2025-12-05 17:40:21 +01:00
8684ec35b1 build 2025-12-05 17:38:52 +01:00
4b3edfb97e build 2025-12-05 17:38:16 +01:00
f8a3b1df09 build 2025-12-05 17:37:12 +01:00
fc47bf7ca8 build 2025-12-05 17:36:18 +01:00
1880468c0a build 2025-12-05 17:35:18 +01:00
cd45bf010a build 2025-12-05 14:23:23 +01:00
b6bb681347 build 2025-12-05 14:22:53 +01:00
75094c4bef build 2025-12-05 14:22:30 +01:00
ad6bddbd27 build 2025-12-05 14:21:56 +01:00
4f23e7bcac build 2025-12-05 14:21:14 +01:00
09af3a71f2 build 2025-12-05 14:18:29 +01:00
4044c739f3 build 2025-12-05 13:34:11 +01:00
52101e3559 build 2025-12-05 07:30:09 +01:00
1699f45b01 build 2025-12-04 20:36:28 +01:00
a95436bff8 build 2025-12-04 20:34:39 +01:00
09f7bcf059 build 2025-12-04 20:28:26 +01:00
60f60c4950 build 2025-12-04 20:26:49 +01:00
89231da969 build 2025-12-04 20:24:48 +01:00
ae54ed3d27 build 2025-12-04 20:24:45 +01:00
da2bbe5318 build 2025-12-04 20:06:12 +01:00
16725c9d47 build 2025-12-04 14:18:31 +01:00
f4742596e5 build 2025-12-04 13:35:13 +01:00
c7f06a3d67 build 2025-12-04 13:29:46 +01:00
225b5e07e4 Merge branch 'main' of gitlab.sectorq.eu:home/docker-compose 2025-12-04 13:28:07 +01:00
e222a43e52 build 2025-12-04 13:27:56 +01:00
ladislav.dusa e38fdbc412 build 2025-12-03 13:17:01 +01:00
6b9b310267 build 2025-12-02 23:50:19 +01:00
e6b210c5c2 build 2025-12-02 17:45:31 +01:00
05f7d57ea0 build 2025-12-02 09:27:40 +01:00
77bf212ea5 build 2025-12-02 09:15:35 +01:00
4e78ee240b build 2025-12-02 09:10:24 +01:00
71470ad568 build 2025-12-02 00:47:29 +01:00
bfaaccd820 build 2025-12-02 00:07:30 +01:00
d3cc962d2c build 2025-12-02 00:07:11 +01:00
d0430f6c29 build 2025-12-02 00:06:51 +01:00
1f4db460b2 build 2025-12-02 00:05:18 +01:00
2f626e5d1d build 2025-12-02 00:02:48 +01:00
eefa342936 build 2025-12-02 00:02:23 +01:00
b8f2e75104 build 2025-12-01 23:56:28 +01:00
f2cd7820ee build 2025-12-01 23:54:51 +01:00
5fb1992d5a build 2025-12-01 23:52:11 +01:00
920a1612f1 build 2025-12-01 23:51:38 +01:00
5e747541a9 build 2025-12-01 23:51:18 +01:00
6596084339 build 2025-12-01 23:50:10 +01:00
7aa2886f56 build 2025-12-01 23:49:35 +01:00
3faf6f4518 build 2025-12-01 23:44:14 +01:00
7fd268c8f7 build 2025-12-01 23:33:08 +01:00
36c3a04d68 build 2025-12-01 23:32:00 +01:00
a7f03c4018 build 2025-12-01 23:31:08 +01:00
e4aae2ad7f build 2025-12-01 23:29:08 +01:00
cb8da69fb7 build 2025-12-01 23:26:52 +01:00
3db89c2fa4 build 2025-12-01 23:17:55 +01:00
0cee8f9035 build 2025-12-01 23:13:44 +01:00
72e396ca25 build 2025-12-01 23:09:45 +01:00
43b639d032 build 2025-12-01 23:09:19 +01:00
ed9e536fe3 build 2025-12-01 23:04:40 +01:00
9a35e5dd04 build 2025-12-01 23:04:11 +01:00
b4b12f491b build 2025-12-01 23:01:26 +01:00
7b5a0df31a build 2025-12-01 23:01:04 +01:00
38794f8d05 build 2025-12-01 22:54:45 +01:00
7ee80c8dd7 build 2025-12-01 22:51:51 +01:00
15d4158cf4 build 2025-12-01 22:50:00 +01:00
f67185ff7a build 2025-12-01 22:43:16 +01:00
2f3e5f1c34 build 2025-12-01 22:40:54 +01:00
23c1830136 build 2025-12-01 22:39:54 +01:00
4019769b46 build 2025-12-01 22:39:30 +01:00
9a5fef9a6c build 2025-12-01 22:17:46 +01:00
7086a5d938 build 2025-12-01 22:14:56 +01:00
cc1973cfba build 2025-12-01 22:13:40 +01:00
21b1074c66 build 2025-12-01 22:10:13 +01:00
940f6a44b4 build 2025-12-01 22:09:44 +01:00
c711d5f918 build 2025-12-01 22:04:37 +01:00
9a31555e24 build 2025-12-01 22:04:05 +01:00
6ce28fee3d build 2025-12-01 22:02:12 +01:00
59ef2785aa build 2025-12-01 21:32:54 +01:00
05832a32f8 build 2025-12-01 21:25:22 +01:00
df36b5e6e9 build 2025-12-01 21:06:10 +01:00
db968226bd build 2025-12-01 21:05:25 +01:00
b44183d97f build 2025-12-01 20:56:04 +01:00
78f958d101 build 2025-12-01 20:53:34 +01:00
6f0c7e1b01 build 2025-12-01 20:50:22 +01:00
a76a083829 build 2025-12-01 20:49:33 +01:00
a30bdd2aaf build 2025-12-01 20:30:18 +01:00
d4ad6a6e20 build 2025-12-01 20:28:55 +01:00
a52c6d0acf build 2025-12-01 20:05:39 +01:00
83075b5d70 build 2025-12-01 19:37:17 +01:00
b7bda89eac build 2025-12-01 14:40:34 +01:00
019b9279b7 build 2025-12-01 11:54:15 +01:00
238ed8934c build 2025-12-01 11:50:53 +01:00
8832b26ac6 build 2025-12-01 09:46:49 +01:00
df02fb6493 build 2025-12-01 00:45:31 +01:00
2503bdff11 build 2025-12-01 00:44:21 +01:00
46f149d67d build 2025-11-30 23:59:15 +01:00
36f36feea3 build 2025-11-30 23:56:04 +01:00
cde8f6c486 build 2025-11-30 23:54:27 +01:00
8a49f037e2 build 2025-11-30 23:54:10 +01:00
f3c5258573 build 2025-11-30 23:51:33 +01:00
0adb6aee4f build 2025-11-30 23:49:56 +01:00
e1638acd8d build 2025-11-30 23:34:46 +01:00
c349c2e262 build 2025-11-30 23:16:59 +01:00
6fe23b5734 build 2025-11-30 23:15:18 +01:00
4100776d71 build 2025-11-30 22:49:47 +01:00
614aea1790 build 2025-11-30 22:38:10 +01:00
7e0423af92 build 2025-11-30 22:34:44 +01:00
fe0e418533 build 2025-11-30 22:28:40 +01:00
2bda209455 build 2025-11-30 22:25:10 +01:00
661cdf4a37 build 2025-11-30 22:20:45 +01:00
e9598adce8 build 2025-11-30 22:06:14 +01:00
71af5ccc4c build 2025-11-30 22:02:25 +01:00
0f990c2c9e build 2025-11-30 21:58:23 +01:00
28afb56f15 build 2025-11-30 21:45:08 +01:00
8cd6483f92 build 2025-11-30 21:39:05 +01:00
f1d9b5afea build 2025-11-30 21:35:07 +01:00
a733b283b1 build 2025-11-30 21:25:00 +01:00
d7e80a3e06 build 2025-11-30 21:21:55 +01:00
d9495b67a2 build 2025-11-30 20:59:37 +01:00
ecdfa9182a build 2025-11-30 20:56:53 +01:00
7be1fc6085 build 2025-11-30 20:55:45 +01:00
aa68e0f291 build 2025-11-30 20:50:00 +01:00
972be8425a build 2025-11-30 19:37:27 +01:00
f901c8a22c build 2025-11-30 19:35:08 +01:00
99966b04ba build 2025-11-30 18:46:25 +01:00
86ed33513d build 2025-11-30 18:26:06 +01:00
a1ceec582a build 2025-11-30 18:01:08 +01:00
f49b9a13e0 build 2025-11-30 17:28:04 +01:00
c2420987ca build 2025-11-30 17:27:05 +01:00
af8e4b1cbf build 2025-11-30 17:13:02 +01:00
5ef7c025f4 build 2025-11-30 17:12:46 +01:00
befd931165 build 2025-11-30 17:04:24 +01:00
a037496191 build 2025-11-30 17:01:20 +01:00
5e8b06175d build 2025-11-30 16:59:40 +01:00
03516cad45 build 2025-11-30 16:58:51 +01:00
1b14ee6a6d build 2025-11-30 16:57:24 +01:00
0e0383bf49 build 2025-11-30 16:56:13 +01:00
527c18c89f build 2025-11-30 16:43:19 +01:00
03c1e12a27 build 2025-11-30 16:35:09 +01:00
2ddb1fad36 build 2025-11-30 16:31:04 +01:00
6f137b7a1b build 2025-11-30 16:29:53 +01:00
9ae1911a44 build 2025-11-30 16:03:20 +01:00
ac9f9dd009 build 2025-11-30 15:57:29 +01:00
3fbf904a6c build 2025-11-30 15:57:28 +01:00
cfb619f3c3 build 2025-11-30 15:36:23 +01:00
bf052fae54 build 2025-11-30 15:35:00 +01:00
63bf6b805b build 2025-11-30 15:16:46 +01:00
a63b1353a7 build 2025-11-30 15:15:01 +01:00
0443fcf7aa build 2025-11-30 15:13:57 +01:00
d3ef0fb2b7 build 2025-11-30 15:08:49 +01:00
67400a92b0 build 2025-11-30 15:05:19 +01:00
a59d0b5fa7 build 2025-11-30 15:04:25 +01:00
c1dec9fbc7 build 2025-11-30 15:02:09 +01:00
29fe44abdb build 2025-11-30 15:01:11 +01:00
8296f99b41 build 2025-11-30 14:58:16 +01:00
06041dc3ee build 2025-11-30 14:50:36 +01:00
78 changed files with 2680 additions and 544 deletions

View File

@@ -1,4 +1,3 @@
-version: '3.9'
 services:
   authentik_ldap:
     environment:
@@ -8,16 +7,19 @@ services:
       TZ: Europe/Bratislava
     image: ${DOCKER_REGISTRY:-}ghcr.io/goauthentik/ldap:${AUTHENTIK_TAG:-2024.6.1}
     ports:
-      - 2389:3389
-      - 2636:6636
+      - target: 3389
+        published: 2389
+        protocol: tcp
+        mode: ingress
+      - target: 6636
+        published: 2636
+        protocol: tcp
+        mode: ingress
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        wud.watch: true
-        wud.watch.digest: true
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
@@ -38,14 +40,11 @@ services:
       timeout: 5s
     image: ${DOCKER_REGISTRY:-docker.io/library/}postgres:16-alpine
     volumes:
-      - /share/docker_data/authentik/database:/var/lib/postgresql/data
+      - database:/var/lib/postgresql/data
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        wud.watch: false
+        wud.watch: 'false'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
@@ -63,13 +62,10 @@ services:
     volumes:
       - redis:/data
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        wud.watch: true
-        wud.watch.digest: true
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
@@ -85,31 +81,34 @@ services:
       TZ: Europe/Bratislava
     image: ${DOCKER_REGISTRY:-}ghcr.io/goauthentik/server:${AUTHENTIK_TAG:-2024.6.1}
     ports:
-      - ${COMPOSE_PORT_HTTP:-9003}:9000
-      - ${COMPOSE_PORT_HTTPS:-9453}:9443
+      - target: 9000
+        published: 9003
+        protocol: tcp
+        mode: ingress
+      - target: 9443
+        published: 9453
+        protocol: tcp
+        mode: ingress
     volumes:
-      - /share/docker_data/authentik/media:/media
-      - /share/docker_data/authentik/custom-templates:/templates
+      - media:/media
+      - custom-templates:/templates
       - /var/run/docker.sock:/var/run/docker.sock
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        homepage.container: authentik-server-1
+        homepage.container: authentik_server
         homepage.description: Authentification server
         homepage.group: Utilities
         homepage.href: https://auth.sectorq.eu
         homepage.icon: authentik.png
         homepage.name: Authentik
-        homepage.server: my-docker
+        homepage.server: my-docker-swarm
         homepage.weight: '10'
         homepage.widget.key: sVOwPPInTue7ZnvolmKG15hkE9gCyLcuAelLOQny6OIVn7JUilny9loPTG0v
         homepage.widget.type: authentik
         homepage.widget.url: https://auth.sectorq.eu
-        wud.watch: true
-        wud.watch.digest: true
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
@@ -127,17 +126,14 @@ services:
     user: root
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
-      - /share/docker_data/authentik/media:/media
-      - /share/docker_data/authentik/certs:/certs
-      - /share/docker_data/authentik/custom-templates:/templates
+      - media:/media
+      - certs:/certs
+      - custom-templates:/templates
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        wud.watch: true
-        wud.watch.digest: true
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
@@ -146,3 +142,9 @@ volumes:
     driver: local
   redis:
     driver: local
+  custom-templates:
+    driver: local
+  media:
+    driver: local
+  certs:
+    driver: local

View File

@@ -33,7 +33,7 @@ services:
       wud.watch: false
     restart: ${RESTART:-unless-stopped}
     volumes:
-      - /share/docker_data/authentik/database:/var/lib/postgresql/data
+      - database:/var/lib/postgresql/data
   redis:
     command: --save 60 1 --loglevel warning
     healthcheck:
@@ -50,7 +50,7 @@ services:
       wud.watch.digest: true
     restart: ${RESTART:-unless-stopped}
     volumes:
-      - redis:/data
+      - authentik_redis:/data
   server:
     command: server
    depends_on:
@@ -80,12 +80,12 @@ services:
      wud.watch: true
      wud.watch.digest: true
    ports:
-      - ${COMPOSE_PORT_HTTP:-9003}:9000
-      - ${COMPOSE_PORT_HTTPS:-9453}:9443
+      - 9003:9000
+      - 9453:9443
    restart: ${RESTART:-unless-stopped}
    volumes:
-      - /share/docker_data/authentik/media:/media
-      - /share/docker_data/authentik/custom-templates:/templates
+      - media:/media
+      - templates:/templates
      - /var/run/docker.sock:/var/run/docker.sock
  worker:
    command: worker
@@ -108,12 +108,18 @@ services:
    user: root
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
-      - /share/docker_data/authentik/media:/media
-      - /share/docker_data/authentik/certs:/certs
-      - /share/docker_data/authentik/custom-templates:/templates
+      - media:/media
+      - certs:/certs
+      - custom-templates:/templates
 volumes:
   database:
     driver: local
   redis:
     driver: local
+  custom-templates:
+    driver: local
+  media:
+    driver: local
+  certs:
+    driver: local

View File

@@ -3,7 +3,7 @@ PG_USER=authentik
 PG_DB=authentik
 AUTHENTIK_SECRET_KEY=ZKkVCxj8kKj5ZklvzxKG2IgYQOftDoLPRjc57yomr1qzbKEQVZ
 AUTHENTIK_ERROR_REPORTING__ENABLED=true
-AUTHENTIK_TAG=2025.8.4
+AUTHENTIK_TAG=2025.10.2
 POSTGRES_PASSWORD=499NU6Ze5HcJK4IwSShO8oDbj3j0i0CalyEzfgEp
 POSTGRES_USER=authentik
 POSTGRES_DB=authentik

View File

@@ -1,3 +1,6 @@
+volumes:
+  data:
+    driver: local
 services:
   bitwarden:
     environment:
@@ -19,17 +22,17 @@ services:
         mode: ingress
     restart: ${RESTART:-unless-stopped}
     volumes:
-      - /share/docker_data/bitwarden/bw-data:/data
+      - data:/data
     deploy:
       labels:
         com.centurylinklabs.watchtower.enable: 'true'
-        homepage.container: vaultwarden
+        homepage.container: bitwarden_bitwarden
         homepage.description: Password manager
         homepage.group: Utilities
         homepage.href: https://pw.sectorq.eu
         homepage.icon: bitwarden.png
         homepage.name: Bitwarden
-        homepage.server: my-docker
+        homepage.server: my-docker-swarm
         homepage.weight: '1'
         wud.watch: 'true'
         wud.watch.digest: 'true'

View File

@@ -29,4 +29,7 @@ services:
       - 8181:80
     restart: ${RESTART:-unless-stopped}
     volumes:
-      - /share/docker_data/bitwarden/bw-data:/data
+      - data:/data
+volumes:
+  data:
+    driver: local

View File

@@ -1,30 +1,35 @@
-version: '3.9'
+volumes:
+  app_data:
+    driver: local
+  db_data:
+    driver: local
 services:
   app:
     env_file:
       - stack.env
     image: ${DOCKER_REGISTRY:-}lscr.io/linuxserver/bookstack:latest
     ports:
-      - 6875:80
+      - target: 80
+        published: 6875
+        protocol: tcp
+        mode: ingress
     volumes:
-      - /share/docker_data/bookstack/bookstack_app_data:/config
+      - app_data:/config
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        com.centurylinklabs.watchtower.enable: true
-        homepage.container: bookstack-app-1
+        com.centurylinklabs.watchtower.enable: 'true'
+        homepage.container: bookstack_app
         homepage.description: Books
         homepage.group: Utilities
         homepage.href: https://bookstack.sectorq.eu
         homepage.icon: bookstack.png
         homepage.name: Bookstack
         homepage.server: my-docker-swarm
-        homepage.weight: 1
-        wud.watch: true
-        wud.watch.digest: true
+        homepage.weight: '1'
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
@@ -36,15 +41,12 @@ services:
       PUID: 0
     image: ${DOCKER_REGISTRY:-}lscr.io/linuxserver/mariadb
     volumes:
-      - /share/docker_data/bookstack/bookstack_db_data:/config
+      - db_data:/config
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        wud.watch: true
-        wud.watch.digest: true
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager

View File

@@ -1,16 +1,16 @@
-PUID: 1000
-PGID: 1000
-APP_URL: https://bookstack.sectorq.eu
-DB_HOST: db
-DB_PORT: 3306
-DB_USER: bookstack
-DB_PASS: l4c1j4yd33Du5lo
-DB_DATABASE: bookstackapp
-MYSQL_ROOT_PASSWORD: l4c1j4yd33Du5lo
-TZ: Europe/Bratislava
-MYSQL_DATABASE: bookstackapp
-MYSQL_USER: bookstack
-MYSQL_PASSWORD: l4c1j4yd33Du5lo
+PUID=1000
+PGID=1000
+APP_URL=https://bookstack.sectorq.eu
+DB_HOST=db
+DB_PORT=3306
+DB_USER=bookstack
+DB_PASS=l4c1j4yd33Du5lo
+DB_DATABASE=bookstackapp
+MYSQL_ROOT_PASSWORD=l4c1j4yd33Du5lo
+TZ=Europe/Bratislava
+MYSQL_DATABASE=bookstackapp
+MYSQL_USER=bookstack
+MYSQL_PASSWORD=l4c1j4yd33Du5lo
 # # Set authentication method to be saml2
 # AUTH_METHOD: saml2
 # # Control if BookStack automatically initiates login via your SAML system if it's the only authentication method.
@@ -46,41 +46,41 @@ MYSQL_PASSWORD: l4c1j4yd33Du5lo
 # Set OIDC to be the authentication method
-AUTH_METHOD: oidc
+AUTH_METHOD=oidc
 #AUTH_METHOD: standard
 # Control if BookStack automatically initiates login via your OIDC system
 # if it's the only authentication method. Prevents the need for the
 # user to click the "Login with x" button on the login page.
 # Setting this to true enables auto-initiation.
-AUTH_AUTO_INITIATE: true
+AUTH_AUTO_INITIATE=true
 # Set the display name to be shown on the login button.
 # (Login with <name>)
-OIDC_NAME: SSO
+OIDC_NAME=SSO
 # Name of the claims(s) to use for the user's display name.
 # Can have multiple attributes listed, separated with a '|' in which
 # case those values will be joined with a space.
 # Example: OIDC_DISPLAY_NAME_CLAIMS=given_name|family_name
-OIDC_DISPLAY_NAME_CLAIMS: name
+OIDC_DISPLAY_NAME_CLAIMS=name
 # OAuth Client ID to access the identity provider
-OIDC_CLIENT_ID: GCPj547vTmEpmsCM8jkuR222SS31yZMdp7oAU82U
+OIDC_CLIENT_ID=GCPj547vTmEpmsCM8jkuR222SS31yZMdp7oAU82U
 # OAuth Client Secret to access the identity provider
-OIDC_CLIENT_SECRET: Nador7SOdsYgfNhRwbeRKLNPkPiASBAlTnKVi294xbOz8MM3e2RlzAaWQsQNZmBtLLZVifb1TG3OpKrVXeeW3Vu8HmJuvy8GwSAT2r0pP0241tDdEShq7UkP9G5Esdt8
+OIDC_CLIENT_SECRET=Nador7SOdsYgfNhRwbeRKLNPkPiASBAlTnKVi294xbOz8MM3e2RlzAaWQsQNZmBtLLZVifb1TG3OpKrVXeeW3Vu8HmJuvy8GwSAT2r0pP0241tDdEShq7UkP9G5Esdt8
 # Issuer URL
 # Must start with 'https://'
-OIDC_ISSUER: https://auth.sectorq.eu/application/o/bookstack/
+OIDC_ISSUER=https://auth.sectorq.eu/application/o/bookstack/
 # The "end session" (RP-initiated logout) URL to call during BookStack logout.
 # By default this is false which disables RP-initiated logout.
 # Setting to "true" will enable logout if found as supported by auto-discovery.
 # Otherwise, this can be set as a specific URL endpoint.
-OIDC_END_SESSION_ENDPOINT: false
+OIDC_END_SESSION_ENDPOINT=false
 # Enable auto-discovery of endpoints and token keys.
 # As per the standard, expects the service to serve a
 # `<issuer>/.well-known/openid-configuration` endpoint.
-OIDC_ISSUER_DISCOVER: true
+OIDC_ISSUER_DISCOVER=true

View File

@@ -0,0 +1,22 @@
services:
docker_mon:
image: ${DOCKER_REGISTRY:-}philhawthorne/ha-dockermon:latest
ports:
- target: 8126
published: 8126
protocol: tcp
mode: ingress
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- config:/config
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
config:

View File

@@ -1,9 +1,11 @@
-version: '3.9'
+networks:
+  gitea:
+    external: false
 services:
   server:
     environment:
-      USER_UID: '1000'
-      USER_GID: '1000'
+      USER_UID: 1000
+      USER_GID: 1000
       ROOT_URL: https://gitea.sectorq.eu
       ENABLE_PASSWORD_SIGNIN_FORM: 'false'
       DISABLE_REGISTRATION: 'true'
@@ -11,59 +13,69 @@ services:
     networks:
       - gitea
     ports:
-      - 3000:3000
-      - '222:22'
+      - target: 3000
+        published: 3000
+        protocol: tcp
+        mode: ingress
+      - target: 22
+        published: 222
+        protocol: tcp
+        mode: ingress
     volumes:
-      - /share/docker_data/gitea:/data
-      - /etc/timezone:/etc/timezone:ro
+      - data:/data
       - /etc/localtime:/etc/localtime:ro
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
         com.centurylinklabs.watchtower.enable: 'true'
-        homepage.container: gitea
-        homepage.description: version control server
-        homepage.group: utilities
-        homepage.href: https://${appname}.sectorq.eu
-        homepage.icon: ${appname}.png
-        homepage.name: gitea
-        homepage.server: my-docker
+        homepage.container: gitea_server
+        homepage.description: Version control server
+        homepage.group: Utilities
+        homepage.href: https://${APPNAME}.sectorq.eu
+        homepage.icon: ${APPNAME}.png
+        homepage.name: Gitea
+        homepage.server: my-docker-swarm
         homepage.weight: '1'
-        homepage.widget.key: ${token}
-        homepage.widget.type: ${appname}
-        homepage.widget.url: https://${appname}.sectorq.eu
+        homepage.widget.key: "b7b6e21beb7489c170215e2b7ae0d9b0099132d6"
+        homepage.widget.type: ${APPNAME}
+        homepage.widget.url: https://${APPNAME}.sectorq.eu
         homepage.widget.version: '2'
         wud.watch: 'true'
         wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
   runner:
     image: ${DOCKER_REGISTRY:-}docker.io/gitea/act_runner:nightly
+    secrets:
+      - gitea_runner_registration_token
     environment:
       CONFIG_FILE: /config/config.yaml
       GITEA_INSTANCE_URL: https://gitea.sectorq.eu/
-      GITEA_RUNNER_REGISTRATION_TOKEN: 8nmkqjhkvywltmnff2o9vs0tzo70ufhsqpvg6ymb
+      GITEA_RUNNER_REGISTRATION_TOKEN_FILE: /run/secrets/gitea_runner_registration_token
       GITEA_RUNNER_NAME: jaydee
       GITEA_RUNNER_LABELS: jaydee
     volumes:
-      - /share/docker_data/gitea-runner/config:/config
-      - /share/docker_data/gitea-runner/data:/data
+      - runner_config:/config
+      - runner_data:/data
       - /var/run/docker.sock:/var/run/docker.sock
+      - /etc/localtime:/etc/localtime:ro
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
         wud.watch: 'true'
         wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
-networks:
-  gitea:
-    external: false
+volumes:
+  data:
+    driver: local
+  runner_config:
+    driver: local
+  runner_data:
+    driver: local
+secrets:
+  gitea_runner_registration_token:
+    external: true

View File

@@ -1,5 +1,16 @@
 services:
+  runner:
+    container_name: gitlab-runner
+    restart: always
+    volumes:
+      - runner:/etc/gitlab-runner
+      - /var/run/docker.sock:/var/run/docker.sock
+    image: ${DOCKER_REGISTRY:-}gitlab/gitlab-runner:latest
+    labels:
+      - wud.watch.digest=true
+      - wud.watch=true
   web:
     container_name: gitlab
     environment:
       GITLAB_OMNIBUS_CONFIG: "external_url 'https://gitlab.sectorq.eu'\nnginx['listen_port']\
@@ -47,8 +58,13 @@ services:
     restart: unless-stopped
     shm_size: 4gb
     volumes:
-      - /share/docker_data/gitlab/config:/etc/gitlab
-      - /share/docker_data/gitlab/logs:/var/log/gitlab
-      - /share/docker_data/gitlab/data:/var/opt/gitlab
+      - config:/etc/gitlab
+      - logs:/var/log/gitlab
+      - data:/var/opt/gitlab
       - /etc/localtime:/etc/localtime:ro
 version: '3.6'
+volumes:
+  runner:
+  config:
+  logs:
+  data:

View File

@@ -0,0 +1,101 @@
services:
runner:
container_name: gitlab-runner
restart: always
volumes:
- runner:/etc/gitlab-runner
- /var/run/docker.sock:/var/run/docker.sock
image: ${DOCKER_REGISTRY:-}gitlab/gitlab-runner:latest
labels:
- wud.watch.digest=true
- wud.watch=true
dns:
- 192.168.77.1
- 192.168.77.101
app:
environment:
GITLAB_OMNIBUS_CONFIG: |
external_url 'https://gitlab.sectorq.eu'
nginx['listen_port'] = 80
nginx['listen_https'] = false
web_server['username'] = 'git'
gitlab_rails['time_zone'] = 'Europe/Bratislava'
gitlab_rails['omniauth_enabled'] = true
gitlab_rails['omniauth_allow_single_sign_on'] = ['saml']
gitlab_rails['omniauth_sync_email_from_provider'] = 'saml'
gitlab_rails['omniauth_sync_profile_from_provider'] = ['saml']
gitlab_rails['omniauth_sync_profile_attributes'] = ['email']
gitlab_rails['omniauth_auto_sign_in_with_provider'] = 'saml'
gitlab_rails['omniauth_block_auto_created_users'] = false
gitlab_rails['omniauth_auto_link_saml_user'] = true
gitlab_rails['omniauth_providers'] = [
{
name: 'saml',
args: {
assertion_consumer_service_url: 'https://gitlab.sectorq.eu/users/auth/saml/callback',
# Shown when navigating to certificates in authentik1
idp_cert_fingerprint: 'f7:fd:49:03:b3:38:52:b3:23:f5:43:c4:8d:08:65:32:e0:5a:7b:0e',
idp_sso_target_url: 'https://auth.sectorq.eu/application/saml/gitlab/sso/binding/redirect/',
issuer: 'https://gitlab.sectorq.eu',
name_identifier_format: 'urn:oasis:names:tc:SAML:2.0:nameid-format:persistent',
attribute_statements: {
email: ['http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress'],
first_name: ['http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name'],
nickname: ['http://schemas.goauthentik.io/2021/02/saml/username']
}
},
label: 'authentik'
}
]
TZ: Europe/Bratislava
hostname: gitlab.sectorq.eu
image: ${DOCKER_REGISTRY:-}gitlab/gitlab-ce:latest
network_mode: bridge
ports:
- target: 80
published: 8785
protocol: tcp
mode: ingress
- target: 443
published: 8743
protocol: tcp
mode: ingress
- target: 22
published: 8722
protocol: tcp
mode: ingress
shm_size: 4gb
volumes:
- config:/etc/gitlab
- logs:/var/log/gitlab
- data:/var/opt/gitlab
- /etc/localtime:/etc/localtime:ro
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: gitlab
homepage.description: Version control
homepage.group: Infrastructure
homepage.href: https://gitlab.sectorq.eu
homepage.icon: gitlab.png
homepage.name: Gitlab
homepage.server: my-docker-swarm
homepage.weight: '1'
homepage.widget.key: glpat-BuMKcaDqeD-Wx3dW4TM9
homepage.widget.type: gitlab
homepage.widget.url: https://gitlab.sectorq.eu
homepage.widget.user_id: '2'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
config:
driver: local
logs:
driver: local
data:
driver: local

View File

@@ -1,5 +1,9 @@
 networks:
-  loki: null
+  loki:
+volumes:
+  loki_data:
+  grafana_data:
+  grafana_certs:
 services:
   grafana:
     entrypoint:
@@ -35,21 +39,20 @@ services:
         published: 3007
         protocol: tcp
         mode: ingress
-    restart: ${RESTART:-unless-stopped}
     user: 0:0
     volumes:
-      - /share/docker_data/grafana/data:/var/lib/grafana
-      - /share/docker_data/grafana/certs:/certs
+      - grafana_data:/var/lib/grafana
+      - grafana_certs:/certs
     deploy:
       labels:
         com.centurylinklabs.watchtower.enable: 'true'
-        homepage.container: grafana
+        homepage.container: grafana_grafana
         homepage.description: Graphs
         homepage.group: Smarthome
         homepage.href: https://g.sectorq.eu
         homepage.icon: grafana.png
         homepage.name: Grafana
-        homepage.server: my-docker
+        homepage.server: my-docker-swarm
         homepage.weight: '1'
         wud.watch: 'true'
         wud.watch.digest: 'true'
@@ -60,6 +63,8 @@ services:
   loki:
     command: -config.file=/etc/loki/local-config.yaml
     image: ${DOCKER_REGISTRY:-}grafana/loki:latest
+    volumes:
+      - loki_data:/loki
     networks:
       - loki
     ports:
@@ -67,7 +72,6 @@ services:
         published: 3100
         protocol: tcp
         mode: ingress
-    restart: ${RESTART:-unless-stopped}
     deploy:
       labels:
         wud.watch: 'true'
@@ -81,11 +85,13 @@ services:
     image: ${DOCKER_REGISTRY:-}grafana/promtail:latest
     networks:
       - loki
+    configs:
+      - source: promtail
+        target: /etc/promtail/config.yml
     volumes:
       - /var/log:/var/log
-      - /share/docker_data/grafana/promtail/config.yml:/etc/promtail/config.yml
-      - /share/Data/__GITLAB/omv_backup/:/share/Data/__GITLAB/omv_backup/
-    restart: ${RESTART:-unless-stopped}
+      #- /share/docker_data/grafana/promtail/config.yml:/etc/promtail/config.yml
+      #- /share/Data/__GITLAB/omv_backup/:/share/Data/__GITLAB/omv_backup/
     deploy:
       labels:
         wud.watch: 'true'
@@ -100,7 +106,6 @@ services:
       - 8092
     networks:
       - loki
-    restart: ${RESTART:-unless-stopped}
     deploy:
       labels:
         com.centurylinklabs.watchtower.enable: 'true'
@@ -110,3 +115,6 @@ services:
     placement:
       constraints:
         - node.role == manager
+configs:
+  promtail:
+    external: true

View File

@@ -0,0 +1,47 @@
services:
vault:
image: hashicorp/vault:latest
command: server -config=/vault/config/vault.hcl
volumes:
- data:/vault/data
configs:
- source: vault_hcl
target: /vault/config/vault.hcl
ports:
- "8200:8200"
environment:
VAULT_LOCAL_CONFIG: |
{
"backend": {
"file": {
"path": "/vault/file"
}
},
"listener": {
"tcp": {
"address": "0.0.0.0:8200",
"tls_disable": 1
}
},
"disable_mlock": true
}
VAULT_API_ADDR: "http://192.168.77.101:8200"
cap_add:
- IPC_LOCK
networks:
- vault-net
deploy:
mode: replicated
replicas: 1
placement:
constraints:
- node.role == manager
configs:
vault_hcl:
external: true
volumes:
data:
networks:
vault-net:
driver: overlay

View File

@@ -1 +0,0 @@
l4c1j4yd33Du5lo

View File

@@ -1,110 +1,108 @@
-version: '3.9'
 services:
   homeassistant:
-    network_mode: host
     image: ${DOCKER_REGISTRY:-}ghcr.io/home-assistant/home-assistant:latest
     volumes:
-      - /share/docker_data/ha/:/config
+      - ha_config:/config
       - /var/run/docker.sock:/var/run/docker.sock
       - /run/dbus:/run/dbus:ro
+    networks:
+      - swarm-ipvlan
+      - traefik-public
+      - homeassistant-internal
     privileged: true
     environment:
-      - DISABLE_JEMALLOC=value
-      - TZ=Europe/Bratislava
+      DISABLE_JEMALLOC: value
+      TZ: Europe/Bratislava
     dns:
       - 192.168.77.101
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        com.centurylinklabs.watchtower.enable: true
+        com.centurylinklabs.watchtower.enable: 'true'
         homepage.group: Smarthome
         homepage.name: Home Assistant
-        homepage.weight: 1
+        homepage.weight: '1'
         homepage.icon: home-assistant.png
         homepage.href: https://ha.sectorq.eu
         homepage.description: 3D Printing
-        homepage.server: my-docker
+        homepage.server: my-docker-swarm
         homepage.container: HomeAssistant
         homepage.widget.type: homeassistant
         homepage.widget.url: https://ha.sectorq.eu
         homepage.widget.key: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiIzOTk5NGJjYjIzYjk0YzExYmM5OWZiNTBlNzU0N2M2YyIsImlhdCI6MTc0MDM5OTY4NCwiZXhwIjoyMDU1NzU5Njg0fQ.LDebvPGreyZzlWT1CylHSdSt8i_cWO72HnNCsCAIaG8
-        wud.watch: true
-        wud.watch.digest: true
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
   esphome:
     image: ${DOCKER_REGISTRY:-}esphome/esphome:latest
     volumes:
-      - /share/docker_data/esphome/config:/config
+      - esphome_config:/config
       - /etc/localtime:/etc/localtime:ro
     privileged: true
     network_mode: host
     environment:
-      - USERNAME=jaydee
-      - PASSWORD=jaydee1
+      USERNAME: jaydee
+      PASSWORD: jaydee1
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        com.centurylinklabs.watchtower.enable: true
+        com.centurylinklabs.watchtower.enable: 'true'
         homepage.group: Smarthome
         homepage.name: ESPHome
-        homepage.weight: 1
+        homepage.weight: '1'
         homepage.icon: esphome.png
         homepage.href: https://esphome.sectorq.eu
         homepage.description: 3D Printing
-        homepage.server: my-docker
+        homepage.server: my-docker-swarm
         homepage.container: esphome
         homepage.widget.type: esphome
         homepage.widget.url: https://esphome.sectorq.eu
         homepage.widget.username: jaydee
         homepage.widget.password: jaydee1
-        wud.watch: true
-        wud.watch.digest: true
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
   wyoming-piper-en:
     image: ${DOCKER_REGISTRY:-}rhasspy/wyoming-piper
     ports:
-      - 10200:10200
+      - target: 10200
+        published: 10200
+        protocol: tcp
+        mode: ingress
     volumes:
-      - /share/docker_data/piper/english:/data
+      - piper_data:/data
     command: --data-dir /data --voice en_US-lessac-medium
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        com.centurylinklabs.watchtower.enable: true
-        wud.watch: true
-        wud.watch.digest: true
+        com.centurylinklabs.watchtower.enable: 'true'
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
   wyoming-whisper-en:
     image: ${DOCKER_REGISTRY:-}rhasspy/wyoming-whisper
     ports:
-      - 10300:10300
+      - target: 10300
+        published: 10300
+        protocol: tcp
+        mode: ingress
     volumes:
-      - /share/docker_data/whisper/english:/data
+      - whisper_data:/data
     command: --data-dir /data --model tiny-int8 --language en
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        com.centurylinklabs.watchtower.enable: true
-        wud.watch: true
-        wud.watch.digest: true
+        com.centurylinklabs.watchtower.enable: 'true'
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
@@ -114,22 +112,21 @@ services:
       --model 'ok_nabu' --uri 'tcp://0.0.0.0:10400' --threshold 0.7 --trigger-level
       2 --debug
     volumes:
-      - /share/docker_data/openwakeword-data:/data
-      - /share/docker_data/openwakeword-data:/custom
+      - openwakeword_data:/data
+      - openwakeword_data:/custom
     environment:
-      - TZ=Europe/Bratislava
+      TZ: Europe/Bratislava
     ports:
-      - 10400:10400
-      - 10400:10400/udp
+      - target: 10400
+        published: 10400
+        protocol: tcp
+        mode: ingress
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        com.centurylinklabs.watchtower.enable: true
-        wud.watch: true
-        wud.watch.digest: true
+        com.centurylinklabs.watchtower.enable: 'true'
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
@@ -138,18 +135,15 @@ services:
     security_opt:
       - apparmor=unconfined
     volumes:
-      - /share/docker_data/matter-server:/data
+      - matter-server:/data
      - /run/dbus:/run/dbus:ro
    network_mode: host
    deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
      labels:
-        com.centurylinklabs.watchtower.enable: true
-        wud.watch: true
-        wud.watch.digest: true
+        com.centurylinklabs.watchtower.enable: 'true'
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
      placement:
        constraints:
          - node.role == manager
@@ -157,52 +151,50 @@ services:
     image: ${DOCKER_REGISTRY:-}ghcr.io/music-assistant/server:latest
     network_mode: host
     volumes:
-      - /share/docker_data/music-assistant-server/data:/data/
+      - music_assistant_server_data:/data/
     cap_add:
       - SYS_ADMIN
       - DAC_READ_SEARCH
     security_opt:
       - apparmor:unconfined
     environment:
-      - LOG_LEVEL=info
+      LOG_LEVEL: info
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        com.centurylinklabs.watchtower.enable: true
-        wud.watch: true
-        wud.watch.digest: true
+        com.centurylinklabs.watchtower.enable: 'true'
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
         homepage.group: Smarthome
         homepage.name: music-assistant
-        homepage.weight: 1
+        homepage.weight: '1'
         homepage.icon: music-assistant.png
         homepage.href: https://music.sectorq.eu
         homepage.description: Music
-        homepage.server: my-docker
+        homepage.server: my-docker-swarm
         homepage.container: music-assistant-server
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
   influxdb:
     ports:
-      - 8086:8086
+      - target: 8086
+        published: 8086
+        protocol: tcp
+        mode: ingress
     volumes:
-      - /share/docker_data/influxdb/data:/var/lib/influxdb2
-      - /share/docker_data/influxdb/config:/etc/influxdb2
+      - influxdb2_data:/var/lib/influxdb2
+      - influxdb2_config:/etc/influxdb2
     secrets:
-      - influxdb2-admin-username
-      - influxdb2-admin-password
-      - influxdb2-admin-token
+      - ha_influxdb2_admin_token
     environment:
-      - DOCKER_INFLUXDB_INIT_MODE=setup
-      - DOCKER_INFLUXDB_INIT_USERNAME=ha
-      - DOCKER_INFLUXDB_INIT_PASSWORD=haHAhaHA
-      - DOCKER_INFLUXDB_INIT_ORG=ha
-      - DOCKER_INFLUXDB_INIT_BUCKET=ha
-      - DOCKER_INFLUXDB_INIT_ADMIN_TOKEN=mytoken123
-      - DOCKER_INFLUXDB_INIT_ADMIN_TOKEN_FILE=/run/secrets/influxdb2-admin-token
+      DOCKER_INFLUXDB_INIT_MODE: setup
+      DOCKER_INFLUXDB_INIT_USERNAME: ha
+      DOCKER_INFLUXDB_INIT_PASSWORD: haHAhaHA
+      DOCKER_INFLUXDB_INIT_ORG: ha
+      DOCKER_INFLUXDB_INIT_BUCKET: ha
+      #DOCKER_INFLUXDB_INIT_ADMIN_TOKEN: mytoken123
+      DOCKER_INFLUXDB_INIT_ADMIN_TOKEN_FILE: /run/secrets/ha_influxdb2_admin_token
     image: ${DOCKER_REGISTRY:-}influxdb:2
     healthcheck:
       test: echo test > /var/lib/influxdb2/hc || exit 1
@@ -210,21 +202,24 @@ services:
       timeout: 3s
       retries: 2
     deploy:
-      mode: replicated
-      replicas: 1
-      restart_policy:
-        condition: any
       labels:
-        com.centurylinklabs.watchtower.enable: true
-        wud.watch: true
-        wud.watch.digest: true
+        com.centurylinklabs.watchtower.enable: 'true'
+        wud.watch: 'true'
+        wud.watch.digest: 'true'
+      replicas: 1
       placement:
         constraints:
           - node.role == manager
+volumes:
+  influxdb2_data:
+  influxdb2_config:
+  music_assistant_server_data:
+  matter-server:
+  ha_config:
+  esphome_config:
+  piper_data:
+  whisper_data:
+  openwakeword_data:
 secrets:
-  influxdb2-admin-username:
-    file: .env.influxdb2-admin-username
-  influxdb2-admin-password:
-    file: .env.influxdb2-admin-password
-  influxdb2-admin-token:
-    file: .env.influxdb2-admin-token
+  ha_influxdb2_admin_token:
+    external: true

View File

@@ -0,0 +1,35 @@
services:
homepage:
dns:
- 192.168.77.1
- 192.168.77.101
environment:
HOMEPAGE_ALLOWED_HOSTS: sectorq.eu,active.home.lan:3003,m-server.home.lan:3003,rpi5.home.lan:3003,nas.home.lan:3003,192.168.77.238:3003,rack.home.lan:3003,192.168.80.222:3003
TZ: Europe/Bratislava
image: ${DOCKER_REGISTRY:-}ghcr.io/gethomepage/homepage:latest
ports:
- target: 3000
published: 3003
protocol: tcp
mode: ingress
volumes:
- config:/app/config
- /var/run/docker.sock:/var/run/docker.sock:ro
- images:/app/public/images
- icons:/app/public/icons
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
config:
driver: local
images:
driver: local
icons:
driver: local

View File

@@ -21,4 +21,6 @@ DB_PASSWORD=postgres
 DB_USERNAME=postgres
 DB_DATABASE_NAME=immich
 HW_MODE1=vaapi
 HW_MODE2=openvino
+APPNAME=immich
+DOCKER_REGISTRY=r.sectorq.eu/library/

View File

@@ -0,0 +1,85 @@
services:
server:
image: ${DOCKER_REGISTRY:-}ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
# devices:
# - /dev/dri:/dev/dri
# group_add:
# - video
# - 993
volumes:
- ${UPLOAD_LOCATION}:/data
- /etc/localtime:/etc/localtime:ro
- /media/nas/photo:/mnt/photos2
env_file:
- .env
ports:
- target: 2283
published: 2283
protocol: tcp
mode: ingress
healthcheck:
disable: false
deploy:
labels:
homepage.container: immich_server
homepage.description: Photo server
homepage.group: Media
homepage.href: https://${APPNAME}.sectorq.eu
homepage.icon: ${APPNAME}.png
homepage.name: Immich
homepage.server: my-docker-swarm
homepage.widget.key: mdaRNyiY19w9YEz3MXT3fiPD9XH3CtQYRM26C0wZJM
homepage.widget.type: ${APPNAME}
homepage.widget.url: https://${APPNAME}.sectorq.eu
homepage.widget.version: '2'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
machine-learning:
image: ${DOCKER_REGISTRY:-}ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
# device_cgroup_rules:
# - 'c 189:* rmw'
# devices:
# - /dev/dri:/dev/dri
volumes:
- model-cache:/cache
- /dev/bus/usb:/dev/bus/usb
env_file:
- .env
healthcheck:
disable: false
deploy:
replicas: 1
placement:
constraints:
- node.role == manager
redis:
image: ${DOCKER_REGISTRY:-}docker.io/valkey/valkey:8-bookworm@sha256:fea8b3e67b15729d4bb70589eb03367bab9ad1ee89c876f54327fc7c6e618571
healthcheck:
test: redis-cli ping || exit 1
deploy:
replicas: 1
placement:
constraints:
- node.role == manager
database:
image: ${DOCKER_REGISTRY:-}ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:bcf63357191b76a916ae5eb93464d65c07511da41e3bf7a8416db519b40b1c23
environment:
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME}
POSTGRES_DB: ${DB_DATABASE_NAME}
POSTGRES_INITDB_ARGS: --data-checksums
volumes:
- db:/var/lib/postgresql/data
shm_size: 128mb
deploy:
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
model-cache: null
db: null

View File

@@ -20,4 +20,6 @@ DB_PASSWORD=postgres
 ###################################################################################
 DB_USERNAME=postgres
 DB_DATABASE_NAME=immich
 POSTGRES_PASSWORD=postgres
+APPNAME=immich
+DOCKER_REGISTRY=r.sectorq.eu/library/

View File

@@ -0,0 +1,37 @@
services:
influxdb:
ports:
- target: 8086
published: 8087
protocol: tcp
mode: ingress
volumes:
- data:/var/lib/influxdb2
- config:/etc/influxdb2
secrets:
- influxdb2-admin-token
environment:
DOCKER_INFLUXDB_INIT_MODE: setup
DOCKER_INFLUXDB_INIT_USERNAME: ha
DOCKER_INFLUXDB_INIT_PASSWORD: haHAhaHA
DOCKER_INFLUXDB_INIT_ORG: ha
DOCKER_INFLUXDB_INIT_BUCKET: ha
DOCKER_INFLUXDB_INIT_ADMIN_TOKEN_FILE: /run/secrets/influxdb2-admin-token
image: ${DOCKER_REGISTRY:-}influxdb:2
healthcheck:
test: echo test > /var/lib/influxdb2/hc || exit 1
interval: 10s
timeout: 3s
retries: 2
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
secrets:
influxdb2-admin-token:
external: true
volumes:
data:
config:

View File

@@ -0,0 +1,27 @@
services:
base-notebook:
ports:
- target: 8888
published: 8888
protocol: tcp
mode: ingress
volumes:
- data:/home/jovyan/work
image: ${DOCKER_REGISTRY:-}jupyter/base-notebook:latest
deploy:
labels:
homepage.container: jupyter_base-notebook
homepage.description: Python server
homepage.group: Utils
homepage.href: http://m-server.home.lan:8888/
homepage.icon: ${APPNAME}.png
homepage.name: Jupyter Notebook
homepage.server: my-docker-swarm
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
data:

View File

@@ -0,0 +1,98 @@
services:
kestra:
command: server standalone --worker-thread=128
environment:
SECRET_MYPASSWORD: bDRjMWo0eWQzM0R1NWxv
SECRET_GITLAB: Z2xwYXQtdWotbi1lRWZUWTM5OFBFNHZLU1M=
KESTRA_CONFIGURATION: |
datasources:
postgres:
url: jdbc:postgresql://postgres:5432/kestra
driverClassName: org.postgresql.Driver
username: kestra
password: k3str4
kestra:
server:
basicAuth:
enabled: false
username: "jaydee@sectorq.eu" # it must be a valid email address
password: ${PASSWORD}
repository:
type: postgres
storage:
type: local
local:
basePath: "/app/storage"
queue:
type: postgres
tasks:
tmpDir:
path: /tmp/kestra-wd/tmp
url: http://localhost:8080/
tutorial-flows:
enabled: false
micronaut:
server:
cors:
enabled: true
image: ${DOCKER_REGISTRY:-}kestra/kestra:${KESTRA_VERSION:-latest}
ports:
- target: 8080
published: 8980
protocol: tcp
mode: ingress
- target: 8081
published: 8981
protocol: tcp
mode: ingress
user: root
volumes:
- /etc/localtime:/etc/localtime:ro
- data:/app/storage
- /var/run/docker.sock:/var/run/docker.sock
- /tmp/kestra-wd:/tmp/kestra-wd
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: kestra_kestra
homepage.description: Automation
homepage.group: Infrastructure
homepage.href: https://${APPNAME}.sectorq.eu
homepage.icon: ${APPNAME}.png
homepage.name: Kestra
homepage.server: my-docker-swarm
homepage.weight: '1'
wud.display.icon: mdi:evernote
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
postgres:
environment:
POSTGRES_DB: kestra
POSTGRES_PASSWORD: k3str4
POSTGRES_USER: kestra
healthcheck:
interval: 30s
retries: 10
test:
- CMD-SHELL
- pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}
timeout: 10s
image: ${DOCKER_REGISTRY:-}postgres:16
volumes:
- db:/var/lib/postgresql/data
deploy:
labels:
wud.watch: 'false'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
data:
driver: local
db:
driver: local

View File

@@ -117,16 +117,16 @@ services:
       - webmail
       - radicale
     ports:
-      - 0.0.0.0:8880:80
-      - 0.0.0.0:8443:443
-      - 0.0.0.0:25:25
-      - 0.0.0.0:465:465
-      - 0.0.0.0:587:587
-      - 0.0.0.0:110:110
-      - 0.0.0.0:995:995
-      - 0.0.0.0:143:143
-      - 0.0.0.0:993:993
-      - 0.0.0.0:4190:4190
+      - '8880:80'
+      - '8443:443'
+      - '25:25'
+      - '465:465'
+      - '587:587'
+      - '110:110'
+      - '995:995'
+      - '143:143'
+      - '993:993'
+      - '4190:4190'
     restart: ${RESTART:-unless-stopped}
     volumes:
       - /share/docker_data/mailu3/certs:/certs

View File

@@ -0,0 +1,259 @@
networks:
clamav:
driver: overlay
default:
driver: overlay
ipam:
config:
- subnet: 192.168.205.0/24
driver: default
fts_attachments:
driver: overlay
internal: true
oletools:
driver: overlay
internal: true
radicale:
driver: overlay
webmail:
driver: overlay
services:
admin:
image: ${DOCKER_REGISTRY:-}ghcr.io/mailu/${DOCKER_PREFIX:-}admin:${MAILU_VERSION:-2024.06}
env_file: stack.env
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
volumes:
- /share/docker_data/mailu3/data:/data
- /share/docker_data/mailu3/dkim:/dkim
networks:
# Swarm uses DNS-based service discovery, but the service still needs a shared network
- default
# DNS is handled by Swarm's internal DNS resolver (the resolver service will be discoverable by name)
antispam:
image: ${DOCKER_REGISTRY:-}ghcr.io/mailu/${DOCKER_PREFIX:-}rspamd:${MAILU_VERSION:-2024.06}
env_file: stack.env
hostname: antispam
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
volumes:
- /share/docker_data/mailu3/filter:/var/lib/rspamd
- /share/docker_data/mailu3/overrides/rspamd:/overrides:ro
networks:
- default
- oletools
- clamav
antivirus:
image: ${DOCKER_REGISTRY:-}clamav/clamav-debian:1.2.0-6
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
volumes:
- /share/docker_data/mailu3/filter/clamav:/var/lib/clamav
networks:
- clamav
healthcheck:
test:
- CMD-SHELL
- kill -0 `cat /tmp/clamd.pid` && kill -0 `cat /tmp/freshclam.pid`
interval: 10s
timeout: 5s
retries: 3
start_period: 10s
fetchmail:
image: ${DOCKER_REGISTRY:-}ghcr.io/mailu/${DOCKER_PREFIX:-}fetchmail:${MAILU_VERSION:-2024.06}
env_file: stack.env
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
volumes:
- /share/docker_data/mailu3/data/fetchmail:/data
networks:
- default # Connect to 'default' for service discovery
front:
# NOTE: 'extends' is removed. You must manually define logging or accept the default.
image: ${DOCKER_REGISTRY:-}ghcr.io/mailu/${DOCKER_PREFIX:-}nginx:${MAILU_VERSION:-2024.06}
env_file: stack.env
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
homepage.container: mailu3-front-1
homepage.description: eMail server
homepage.group: Utilities
homepage.href: https://mail.sectorq.eu
homepage.icon: ${APPNAME}.png
homepage.name: Mailu
homepage.server: my-docker
homepage.weight: 1
volumes:
- /share/docker_data/mailu3/certs:/certs
- /share/docker_data/mailu3/overrides/nginx:/overrides:ro
networks:
- default
- webmail
- radicale
ports:
- target: 80
published: 8880
protocol: tcp
mode: ingress
- target: 443
published: 8443
protocol: tcp
mode: ingress
- target: 25
published: 25
protocol: tcp
mode: ingress
- target: 465
published: 465
protocol: tcp
mode: ingress
- target: 587
published: 587
protocol: tcp
mode: ingress
- target: 110
published: 110
protocol: tcp
mode: ingress
- target: 995
published: 995
protocol: tcp
mode: ingress
- target: 143
published: 143
protocol: tcp
mode: ingress
- target: 993
published: 993
protocol: tcp
mode: ingress
- target: 4190
published: 4190
protocol: tcp
mode: ingress
fts_attachments:
image: ${DOCKER_REGISTRY:-}apache/tika:2.9.2.1-full
hostname: tika
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
networks:
- fts_attachments
healthcheck:
test:
- CMD-SHELL
- wget -nv -t1 -O /dev/null http://127.0.0.1:9998/tika || exit 1
interval: 10s
timeout: 5s
retries: 3
start_period: 10s
imap:
image: ${DOCKER_REGISTRY:-}ghcr.io/mailu/${DOCKER_PREFIX:-}dovecot:${MAILU_VERSION:-2024.06}
env_file: stack.env
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
volumes:
- /share/docker_data/mailu3/mail:/mail
- /share/docker_data/mailu3/overrides/dovecot:/overrides:ro
networks:
- default
- fts_attachments
oletools:
image: ${DOCKER_REGISTRY:-}ghcr.io/mailu/${DOCKER_PREFIX:-}oletools:${MAILU_VERSION:-2024.06}
hostname: oletools
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
networks:
- oletools
redis:
image: ${DOCKER_REGISTRY:-}redis:alpine
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
volumes:
- /share/docker_data/mailu3/redis:/data
networks:
- default # Connect to default network
resolver:
image: ${DOCKER_REGISTRY:-}ghcr.io/mailu/${DOCKER_PREFIX:-}unbound:${MAILU_VERSION:-2024.06}
env_file: stack.env
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
networks:
default:
# NOTE: Swarm does not support static IPs for scaling.
# This will fail standard 'docker stack deploy'.
# For mailu, the static IP is critical, so we attempt to enforce it
# via the deploy key, but be aware this is highly non-standard.
# It's better to configure Mailu to use the service name 'resolver' instead of the static IP.
# If using a customized deployer:
# deploy:
# placement:
# constraints:
# - node.hostname == your-swarm-manager
# endpoint_mode: dnsrr
# mode: global
# replicas: 1
# labels:
# com.docker.stack.static_ips: 192.168.205.254
# com.docker.stack.static_network: default
ipv4_address: 192.168.205.254
smtp:
image: ${DOCKER_REGISTRY:-}ghcr.io/mailu/${DOCKER_PREFIX:-}postfix:${MAILU_VERSION:-2024.06}
env_file: stack.env
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
volumes:
- /share/docker_data/mailu3/mailqueue:/queue
- /share/docker_data/mailu3/overrides/postfix:/overrides:ro
networks:
- default # Connect to default network
webdav:
image: ${DOCKER_REGISTRY:-}ghcr.io/mailu/${DOCKER_PREFIX:-}radicale:${MAILU_VERSION:-2024.06}
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
volumes:
- /share/docker_data/mailu3/dav:/data
networks:
- radicale
webmail:
image: ${DOCKER_REGISTRY:-}ghcr.io/mailu/${DOCKER_PREFIX:-}webmail:${MAILU_VERSION:-2024.06}
env_file: stack.env
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
volumes:
- /share/docker_data/mailu3/webmail:/data
- /share/docker_data/mailu3/overrides/roundcube:/overrides:ro
networks:
- webmail
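One Swarm caveat the notes above do not cover: ports published in ingress mode pass through the routing mesh, which source-NATs connections, so the front proxy sees an internal ingress address rather than the real client IP. Where original client IPs matter for mail policy decisions, host-mode publishing is the usual workaround; a minimal sketch for the SMTP port only, assuming front stays pinned to a single node:

services:
  front:
    ports:
      - target: 25
        published: 25
        protocol: tcp
        mode: host   # binds on the node running the task and preserves source IPs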

View File

@@ -0,0 +1,46 @@
services:
app:
image: ${DOCKER_REGISTRY:-}ghcr.io/mealie-recipes/mealie:v2.8.0
ports:
- target: 9000
published: 9925
protocol: tcp
mode: ingress
deploy:
resources:
limits:
memory: 1000M
labels:
homepage.container: mealie_app
homepage.description: Recipe server
homepage.group: Utils
homepage.href: https://${APPNAME}.sectorq.eu
homepage.icon: ${APPNAME}.png
homepage.name: Mealie
homepage.server: my-docker-swarm
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
- data:/app/data/
environment:
ALLOW_SIGNUP: 'false'
PUID: 1000
PGID: 1000
TZ: Europe/Bratislava
BASE_URL: https://mealie.sectorq.eu
OIDC_AUTH_ENABLED: 'true'
OIDC_PROVIDER_NAME: authentik
OIDC_CONFIGURATION_URL: https://auth.sectorq.eu/application/o/mealie/.well-known/openid-configuration
OIDC_CLIENT_ID: QfrrMn3EzUqkb3ueFl8UQe983qCxr50O2eScPZ3b
OIDC_CLIENT_SECRET: SN5QQJzEZO6kFbyZJ4JcaUbev1CH3VDFfyfB0oeJXo23r0Wx74xpfLS3OMAvoRW8QFxpaYwsRm492MHtZIHaofwf29yhjADHA2DABPecSGAm8V6JVU8m4HRSF3NjDyTV
OIDC_SIGNUP_ENABLED: 'true'
OIDC_USER_GROUP: mealie-users
OIDC_ADMIN_GROUP: mealie-admins
OIDC_AUTO_REDIRECT: 'true'
OIDC_REMEMBER_ME: 'true'
volumes:
data:

View File

@@ -144,7 +144,7 @@ services:
ports: ports:
- 8096:8096 - 8096:8096
- 8920:8920 - 8920:8920
- 7359:7359/udp - 7359:7359
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
volumes: volumes:
- /share/docker_data/jellyfin:/config - /share/docker_data/jellyfin:/config
@@ -248,7 +248,7 @@ services:
ports: ports:
- 8085:8085 - 8085:8085
- 6881:6881 - 6881:6881
- 6881:6881/udp
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
volumes: volumes:
- /share/docker_data/qbittorrent/config:/config - /share/docker_data/qbittorrent/config:/config

View File

@@ -0,0 +1,420 @@
networks:
duplicati:
driver: overlay
mediarr:
driver: overlay
volumes:
homarr_configs:
homarr_icons:
homarr_data:
jackett_config:
jackett_downloads:
jellyfin_config:
jellyseerr_config:
lidarr_config:
qbittorrent_config:
radarr_config:
sonarr_config:
bazarr_config:
m-server_music:
driver: local
driver_opts:
type: nfs
o: addr=192.168.77.101,rw,nfsvers=4.1
device: :/music
services:
bazarr:
environment:
PUID: '1000'
PGID: '1000'
TZ: Europe/Bratislava
hostname: bazarr
image: ${DOCKER_REGISTRY:-}lscr.io/linuxserver/bazarr:latest
networks:
- mediarr
dns:
- 192.168.77.101
ports:
- target: 6767
published: 6767
protocol: tcp
mode: ingress
volumes:
- bazarr_config:/config
- /media/m-server/movies:/movies/m-server
- /media/m-server/shows:/tv/m-server
- /media/nas/movies:/movies/nas
- /media/nas/shows:/tv/nas
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: mediacenter_bazarr
homepage.description: Subtitles
homepage.group: Media
homepage.href: https://bazarr.sectorq.eu
homepage.icon: bazarr.png
homepage.name: bazarr
homepage.server: my-docker-swarm
homepage.weight: '90'
homepage.widget.key: ${BAZARR_TOKEN}
homepage.widget.type: bazarr
homepage.widget.url: https://bazarr.sectorq.eu
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
flaresolverr:
environment:
LOG_LEVEL: info
TZ: Europe/Bratislava
hostname: flaresolverr
image: ${DOCKER_REGISTRY:-}ghcr.io/flaresolverr/flaresolverr:latest
networks:
- mediarr
ports:
- target: 8191
published: 8191
protocol: tcp
mode: ingress
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
homarr:
hostname: homarr
image: ${DOCKER_REGISTRY:-}ghcr.io/ajnart/homarr:latest
networks:
- mediarr
ports:
- target: 7575
published: 7575
protocol: tcp
mode: ingress
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- homarr_configs:/app/data/configs
- homarr_icons:/app/public/icons
- homarr_data:/data
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
jackett:
dns:
- 192.168.77.101
environment:
PUID: '1000'
PGID: '1000'
TZ: Europe/Bratislava
AUTO_UPDATE: 'true'
RUN_OPTS: ''
hostname: jackett
image: ${DOCKER_REGISTRY:-}lscr.io/linuxserver/jackett:latest
networks:
- mediarr
ports:
- target: 9117
published: 9117
protocol: tcp
mode: ingress
volumes:
- jackett_config:/config
- jackett_downloads:/downloads
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: mediacenter_jackett
homepage.description: Indexer proxy
homepage.group: Media
homepage.href: https://jackett.sectorq.eu
homepage.icon: jackett.png
homepage.name: Jackett
homepage.server: my-docker-swarm
homepage.weight: '80'
homepage.widget.password: ${JACKET_TOKEN}
homepage.widget.type: jackett
homepage.widget.url: https://jackett.sectorq.eu
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
jellyfin:
environment:
TZ: Europe/Bratislava
JELLYFIN_PublishedServerUrl: https://jf.sectorq.eu
VAAPI_DEVICE: /dev/dri/renderD128
LIBVA_DRIVER_NAME: radeonsi
hostname: jellyfin
image: ${DOCKER_REGISTRY:-}lscr.io/linuxserver/jellyfin:latest
ports:
- target: 8096
published: 8096
protocol: tcp
mode: ingress
- target: 8920
published: 8920
protocol: tcp
mode: ingress
- target: 7359
published: 7359
protocol: tcp
mode: ingress
user: root
volumes:
- jellyfin_config:/config
- /media/m-server/movies:/data/movies/m-server
- m-server_music:/data/music/m-server
- /media/m-server/shows:/data/shows/m-server
- /media/nas/movies:/data/movies/nas
- /media/nas/music:/data/music/nas
- /media/nas/shows:/data/shows/nas
- /media/nas/xxx:/data/xxx/nas
- /dev/dri:/dev/dri
devices:
- /dev/dri/renderD128:/dev/dri/renderD128
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: mediacenter_jellyfin
homepage.description: Media server
homepage.group: Media
homepage.href: https://jf.sectorq.eu
homepage.icon: jellyfin.png
homepage.name: Jellyfin
homepage.server: my-docker-swarm
homepage.weight: '10'
homepage.widget.key: ${JELLYFIN_TOKEN}
homepage.widget.type: jellyfin
homepage.widget.url: https://jf.sectorq.eu
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
- node.labels.gpu == amd
jellyseerr:
environment:
LOG_LEVEL: debug
TZ: Europe/Bratislava
hostname: jellyseerr
image: ${DOCKER_REGISTRY:-}fallenbagel/jellyseerr:latest
networks:
- mediarr
ports:
- target: 5055
published: 5055
protocol: tcp
mode: ingress
volumes:
- jellyseerr_config:/app/config
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: mediacenter_jellyseerr
homepage.description: Media requests
homepage.group: Media
homepage.href: https://js.sectorq.eu
homepage.icon: jellyseerr.png
homepage.name: Jellyseerr
homepage.server: my-docker-swarm
homepage.weight: '20'
homepage.widget.key: ${JELLYSEER_TOKEN}
homepage.widget.type: jellyseerr
homepage.widget.url: https://js.sectorq.eu
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
lidarr:
environment:
PUID: '1000'
PGID: '1000'
TZ: Europe/Bratislava
hostname: lidarr
image: ${DOCKER_REGISTRY:-}lscr.io/linuxserver/lidarr:latest
networks:
- mediarr
ports:
- target: 8686
published: 8686
protocol: tcp
mode: ingress
volumes:
- lidarr_config:/config
- /media/m-server/music:/music
- /media/m-server/downloads:/downloads
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: mediacenter_lidarr
homepage.description: Music manager
homepage.group: Media
homepage.href: https://lidarr.sectorq.eu
homepage.icon: lidarr.png
homepage.name: Lidarr
homepage.server: my-docker-swarm
homepage.weight: '60'
homepage.widget.key: ${LIDARR_TOKEN}
homepage.widget.type: lidarr
homepage.widget.url: https://lidarr.sectorq.eu
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
qbittorrent:
environment:
PUID: '1000'
PGID: '1000'
TZ: Europe/Bratislava
WEBUI_PORT: '8085'
FILE__PASSWORD: /run/secrets/mysecretpassword
hostname: qbittorrent
image: ${DOCKER_REGISTRY:-}lscr.io/linuxserver/qbittorrent:latest
networks:
- mediarr
ports:
- target: 8085
published: 8085
protocol: tcp
mode: ingress
- target: 6881
published: 6881
protocol: tcp
mode: ingress
volumes:
- qbittorrent_config:/config
- /media/m-server/downloads:/downloads
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: mediacenter_qbittorrent
homepage.description: Torrent client
homepage.group: Utilities
homepage.href: https://qbit.sectorq.eu
homepage.icon: qbittorrent.png
homepage.name: Qbittorrent
homepage.server: my-docker-swarm
homepage.weight: '95'
homepage.widget.enableLeechProgress: 'false'
homepage.widget.password: ${QBIT_TOKEN}
homepage.widget.type: qbittorrent
homepage.widget.url: https://qbit.sectorq.eu
homepage.widget.username: admin
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
radarr:
dns:
- 192.168.77.101
environment:
PUID: '1000'
PGID: '1000'
TZ: Europe/Bratislava
hostname: radarr
image: ${DOCKER_REGISTRY:-}lscr.io/linuxserver/radarr:latest
networks:
- mediarr
ports:
- target: 7878
published: 7878
protocol: tcp
mode: ingress
volumes:
- radarr_config:/config
- /media/m-server/movies/:/movies-m-server
- /media/nas/movies/:/movies-nas
- /media/m-server/downloads:/downloads
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: mediacenter_radarr
homepage.description: Movie manager
homepage.group: Media
homepage.href: https://radarr.sectorq.eu
homepage.icon: radarr.png
homepage.name: Radarr
homepage.server: my-docker-swarm
homepage.weight: '20'
homepage.widget.key: ${RADARR_TOKEN}
homepage.widget.type: radarr
homepage.widget.url: https://radarr.sectorq.eu
wud.display.icon: mdi:radarr
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
sonarr:
dns:
- 192.168.77.101
environment:
PUID: '1000'
PGID: '1000'
TZ: Europe/Bratislava
hostname: sonarr
image: ${DOCKER_REGISTRY:-}lscr.io/linuxserver/sonarr:latest
networks:
- mediarr
ports:
- target: 8989
published: 8989
protocol: tcp
mode: ingress
volumes:
- sonarr_config:/config
- /media/m-server/shows:/tv-m-server
- /media/nas/shows:/tv-nas
- /media/m-server/downloads:/downloads
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: mediacenter_sonarr
homepage.description: TV show manager
homepage.group: Media
homepage.href: https://sonarr.sectorq.eu
homepage.icon: sonarr.png
homepage.name: Sonarr
homepage.server: my-docker-swarm
homepage.weight: '30'
homepage.widget.key: ${SONARR_TOKEN}
homepage.widget.type: sonarr
homepage.widget.url: https://sonarr.sectorq.eu
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
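The qbittorrent service points FILE__PASSWORD at /run/secrets/mysecretpassword (the linuxserver FILE__ convention reads the variable's value from that file), but the stack never declares the secret, so the file will not be mounted into the task. A minimal sketch of the missing wiring, assuming the secret was created beforehand with docker secret create mysecretpassword:

services:
  qbittorrent:
    secrets:
      - mysecretpassword
secrets:
  mysecretpassword:
    external: true   # managed outside the stack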

View File

@@ -0,0 +1,25 @@
services:
mosquitto:
image: ${DOCKER_REGISTRY:-}eclipse-mosquitto
ports:
- target: 1883
published: 1883
protocol: tcp
mode: host
volumes:
- conf:/mosquitto/config
- data:/mosquitto/data
- log:/mosquitto/log
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
conf:
data:
log:

View File

@@ -0,0 +1,40 @@
services:
app:
dns:
- 192.168.77.101
environment:
TZ: Europe/Bratislava
image: ${DOCKER_REGISTRY:-}ghcr.io/motioneye-project/motioneye:edge
ports:
- target: 8081
published: 8081
protocol: tcp
mode: ingress
- target: 8765
published: 8765
protocol: tcp
mode: ingress
volumes:
- /etc/localtime:/etc/localtime:ro
- config:/etc/motioneye
- data:/var/lib/motioneye
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: motioneye_app
homepage.description: Video manager
homepage.group: Media
homepage.href: http://m-server.home.lan:8765/
homepage.icon: /images/motioneye.webp
homepage.name: MotionEye
homepage.server: my-docker-swarm
homepage.weight: '1'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
config:
data:

__swarm/n8n/n8n-swarm.yml Normal file
View File

@@ -0,0 +1,42 @@
version: '3'
services:
app:
image: ${DOCKER_REGISTRY:-}n8nio/n8n:latest
ports:
- target: 5678
published: 5679
protocol: tcp
mode: ingress
environment:
N8N_HOST: n8n.sectorq.eu
N8N_PORT: '5678'
N8N_PROTOCOL: https
N8N_BASIC_AUTH_ACTIVE: 'true'
N8N_BASIC_AUTH_USER: sth
N8N_BASIC_AUTH_PASSWORD: pwd
N8N_RUNNERS_ENABLED: 'true'
N8N_RUNNERS_MODE: internal
N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS: 'true'
N8N_SECURE_COOKIE: 'false'
WEBHOOK_URL: https://n8n.sectorq.eu
volumes:
- data:/home/node/.n8n
stop_grace_period: 60s
deploy:
labels:
homepage.container: n8n_app
homepage.description: Workflow management
homepage.group: Utils
homepage.href: https://${APPNAME}.sectorq.eu
homepage.icon: /icons/n8n.svg
homepage.name: n8n
homepage.server: my-docker-swarm
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
data:
driver: local

View File

@@ -0,0 +1,15 @@
services:
nebula-sync:
image: ghcr.io/lovelaze/nebula-sync:latest
environment:
PRIMARY: http://192.168.77.101:9380|l4c1j4yd33Du5lo
REPLICAS: http://192.168.77.106:9380|l4c1j4yd33Du5lo
CLIENT_SKIP_TLS_VERIFICATION: 'true'
FULL_SYNC: 'true'
RUN_GRAVITY: 'true'
CRON: 0 * * * *
deploy:
replicas: 1
placement:
constraints:
- node.role == manager

View File

@@ -1,2 +1,3 @@
APPNAME=nextcloud APPNAME=nextcloud
#RESTART=always #RESTART=always
DOCKER_REGISTRY=r.sectorq.eu/library/

View File

@@ -0,0 +1,109 @@
services:
app:
dns:
- 192.168.77.101
env_file:
- stack.env
image: ${DOCKER_REGISTRY:-}nextcloud:latest
links:
- db
ports:
- target: 80
published: 8134
protocol: tcp
mode: ingress
volumes:
- data:/var/www/html
- pre-installation:/docker-entrypoint-hooks.d/pre-installation
- post-installation:/docker-entrypoint-hooks.d/post-installation
- pre-upgrade:/docker-entrypoint-hooks.d/pre-upgrade
- post-upgrade:/docker-entrypoint-hooks.d/post-upgrade
- before-starting:/docker-entrypoint-hooks.d/before-starting
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
com.centurylinklabs.watchtower.lifecycle.post-update: apt update;apt install
-y smbclient;chown -R www-data:www-data /var/www/html
homepage.container: nextcloud_app
homepage.description: Cloud server
homepage.group: Infrastructure
homepage.href: https://nc.sectorq.eu
homepage.icon: ${APPNAME}.png
homepage.name: Nextcloud
homepage.server: my-docker-swarm
homepage.widget.password: oGeiy-tTc8p-LJdt5-na3JF-dbWpY
homepage.widget.type: ${APPNAME}
homepage.widget.url: https://nc.sectorq.eu
homepage.widget.username: jaydee
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
db:
command: --transaction-isolation=READ-COMMITTED --binlog-format=ROW --innodb-file-per-table=1
--skip-innodb-read-only-compressed
env_file:
- stack.env
image: ${DOCKER_REGISTRY:-}yobasystems/alpine-mariadb:latest
volumes:
- mariadb:/var/lib/mysql
- /etc/localtime:/etc/localtime
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
redis:
image: ${DOCKER_REGISTRY:-}redis:alpine
volumes:
- redis:/data
deploy:
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
appapi-harp:
environment:
HP_SHARED_KEY: l4c1j4yd33Du5lo
NC_INSTANCE_URL: https://nc.sectorq.eu
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- certs:/certs
hostname: appapi-harp
ports:
- target: 8780
published: 8780
protocol: tcp
mode: ingress
- target: 8782
published: 8782
protocol: tcp
mode: ingress
image: ${DOCKER_REGISTRY:-}ghcr.io/nextcloud/nextcloud-appapi-harp:release
deploy:
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
data:
pre-installation:
post-installation:
pre-upgrade:
post-upgrade:
before-starting:
mariadb:
redis:
certs:
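The links: entry on the app service is ignored by docker stack deploy (it is reported in the 'Ignoring unsupported options' warning); on the stack's default overlay network the database is already reachable by its service name. A minimal sketch, with the host shown inline purely for illustration (here it would normally come from stack.env):

services:
  app:
    environment:
      MYSQL_HOST: db   # resolved by Swarm's internal DNS; no links needed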

View File

@@ -0,0 +1,55 @@
networks:
pihole_pihole:
external: true
services:
app:
dns:
- 192.168.78.254
healthcheck:
interval: 10s
test:
- CMD
- /usr/bin/check-health
timeout: 3s
image: jc21/nginx-proxy-manager:latest
networks:
- pihole_pihole
ports:
- target: 80
published: 8099
protocol: tcp
mode: ingress
- target: 443
published: 4439
protocol: tcp
mode: ingress
- target: 81
published: 81
protocol: tcp
mode: ingress
volumes:
- data:/data
- letsencrypt:/etc/letsencrypt
deploy:
labels:
homepage.container: nginx-app-1
homepage.description: Reverse Proxy
homepage.group: Infrastructure
homepage.href: http://active.home.lan:81
homepage.icon: nginx-proxy-manager.png
homepage.name: Nginx
homepage.server: my-docker-swarm
homepage.weight: '25'
homepage.widget.password: OdyAJvifHvDPMOyFdbiKak5S
homepage.widget.type: npm
homepage.widget.url: http://active.home.lan:81
homepage.widget.username: monitoring@sectorq.eu
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
data:
letsencrypt:

View File

@@ -0,0 +1,32 @@
services:
app:
dns:
- 192.168.77.101
environment:
TZ: Europe/Bratislava
image: ${DOCKER_REGISTRY:-}nodered/node-red:latest
ports:
- target: 1880
published: 1880
protocol: tcp
mode: ingress
volumes:
- data:/data
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
homepage.container: node-red_app
homepage.description: Node red
homepage.group: Infrastructure
homepage.href: http://active.home.lan:1880
homepage.icon: node-red.png
homepage.name: Node-red
homepage.server: my-docker-swarm
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
data:

View File

@@ -0,0 +1,24 @@
services:
app:
image: ${DOCKER_REGISTRY:-}ollama/ollama:rocm
devices:
- /dev/kfd
- /dev/dri
volumes:
- models:/root/.ollama
environment:
HSA_OVERRIDE_GFX_VERSION: 11.0.0
ports:
- target: 11434
published: 11434
protocol: tcp
mode: ingress
dns:
- 8.8.8.8
deploy:
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
models:
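Worth flagging: docker stack deploy also ignores the devices: section, so /dev/kfd and /dev/dri are not passed through and ROCm acceleration will not work when this file is deployed as a swarm stack. A minimal sketch of the same service run with plain docker compose on the GPU node, where devices: is honoured:

services:
  app:
    image: ollama/ollama:rocm
    devices:          # honoured by docker compose, ignored by stack deploy
      - /dev/kfd
      - /dev/dri
    environment:
      HSA_OVERRIDE_GFX_VERSION: 11.0.0
    volumes:
      - models:/root/.ollama
volumes:
  models: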

View File

@@ -0,0 +1,103 @@
services:
server:
build:
context: .
image: onlyoffice/documentserver
environment:
DB_TYPE: postgres
DB_HOST: postgresql
DB_PORT: '5432'
DB_NAME: onlyoffice
DB_USER: onlyoffice
AMQP_URI: amqp://guest:guest@onlyoffice-rabbitmq
ports:
- target: 80
published: 8280
protocol: tcp
mode: ingress
- target: 443
published: 22443
protocol: tcp
mode: ingress
healthcheck:
test:
- CMD
- curl
- -f
- http://localhost:8000/info/info.json
interval: 30s
retries: 5
start_period: 60s
timeout: 10s
stdin_open: true
stop_grace_period: 60s
volumes:
- data:/var/www/onlyoffice/Data
- logs:/var/log/onlyoffice
- cache:/var/lib/onlyoffice/documentserver/App_Data/cache/files
- files:/var/www/onlyoffice/documentserver-example/public/files
- fonts:/usr/share/fonts
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
homepage.container: onlyoffice_server
homepage.description: OnlyOffice Document Server
homepage.group: Infrastructure
homepage.href: http://active.home.lan:8280/example/
homepage.icon: onlyoffice.png
homepage.name: OnlyOffice Document Server
homepage.server: my-docker-swarm
replicas: 1
placement:
constraints:
- node.role == manager
onlyoffice-rabbitmq:
image: rabbitmq:3
expose:
- '5672'
healthcheck:
test:
- CMD
- rabbitmq-diagnostics
- status
interval: 10s
retries: 3
start_period: 10s
timeout: 10s
deploy:
replicas: 1
placement:
constraints:
- node.role == manager
postgresql:
image: postgres:15
environment:
POSTGRES_DB: onlyoffice
POSTGRES_USER: onlyoffice
POSTGRES_HOST_AUTH_METHOD: trust
expose:
- '5432'
volumes:
- db:/var/lib/postgresql
healthcheck:
test:
- CMD-SHELL
- pg_isready -U onlyoffice
interval: 10s
retries: 3
start_period: 10s
timeout: 10s
deploy:
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
db:
data:
logs:
cache:
files:
fonts:
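Recent documentserver releases enable JWT validation by default and generate a random secret when none is supplied, which every connecting client (for example a Nextcloud ONLYOFFICE integration) must also know; this stack does not pin one. A minimal sketch with a placeholder value:

services:
  server:
    environment:
      JWT_ENABLED: 'true'
      JWT_SECRET: change-me   # placeholder; must match the secret configured on the client side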

View File

@@ -0,0 +1,69 @@
services:
broker:
image: ${DOCKER_REGISTRY:-}docker.io/library/redis:8
volumes:
- redisdata:/data
deploy:
replicas: 1
placement:
constraints:
- node.role == manager
webserver:
image: ${DOCKER_REGISTRY:-}ghcr.io/paperless-ngx/paperless-ngx:latest
ports:
- target: 8000
published: 8001
protocol: tcp
mode: ingress
volumes:
- data:/usr/src/paperless/data
- media:/usr/src/paperless/media
- export:/usr/src/paperless/export
- consume:/usr/src/paperless/consume
- scripts:/opt/scripts
env_file: stack.env
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_APPS: allauth.socialaccount.providers.openid_connect
PAPERLESS_URL: https://paperless.sectorq.eu
PAPERLESS_CSRF_TRUSTED_ORIGINS: https://paperless.sectorq.eu
PAPERLESS_POST_CONSUME_SCRIPT: /opt/scripts/post-consumption.sh
PAPERLESS_SOCIALACCOUNT_PROVIDERS: >
{
"openid_connect": {
"APPS": [
{
"provider_id": "authentik",
"name": "Authentik",
"client_id": "B4NM614bqWkvDqGDAmR823qUm8n4ZNlG3XtvkI51",
"secret": "7FFRdLWOUHlDxkhc86xR2yhxRn8BmDfTtfX9aTVY1XbRY197zy3UXPs51IMIkIjwjp6uijtpIQDDJDpR7LNInJt0F5hEXGMEcTfJxYyfNv2ytKFO58tCN5UD2EnzbCmN",
"settings": {
"server_url": "https://auth.sectorq.eu/application/o/paperless/.well-known/openid-configuration"
}
}
],
"OAUTH_PKCE_ENABLED": "True"
}
}
deploy:
labels:
homepage.container: paperless-ngx_webserver
homepage.description: PDF server
homepage.group: Utils
homepage.href: https://paperless.sectorq.eu
homepage.icon: ${APPNAME}.png
homepage.name: Paperless
homepage.server: my-docker-swarm
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
data:
media:
export:
consume:
scripts:
redisdata:

View File

@@ -0,0 +1,70 @@
volumes:
data:
driver: local
dnsmasq_d:
driver: local
networks:
pihole:
driver: overlay
attachable: true
ipam:
config:
- subnet: 192.168.78.0/24
services:
app:
cap_add:
- NET_ADMIN
- SYS_TIME
- SYS_NICE
environment:
FTLCONF_dns_listeningMode: all
FTLCONF_dns_upstreams: 8.8.8.8;8.8.4.4
FTLCONF_webserver_api_password: ${PASSWORD}
TZ: Europe/Bratislava
hostname: m-server
image: pihole/pihole:latest
networks:
pihole:
ipv4_address: 192.168.78.254
ports:
- target: 53
published: 53
protocol: udp
mode: ingress
- target: 80
published: 9380
protocol: tcp
mode: ingress
- target: 443
published: 9343
protocol: tcp
mode: ingress
volumes:
- data:/etc/pihole
- dnsmasq_d:/etc/dnsmasq.d
- type: tmpfs
target: /dev/shm
tmpfs:
size: 248000000
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: pihole_app
homepage.description: Ad blocker
homepage.group: Infrastructure
homepage.href: https://active.home.lan:9343/admin
homepage.icon: /images/pihole.png
homepage.name: Pihole
homepage.server: my-docker-swarm
homepage.weight: '1'
homepage.widget.key: ${PASSWORD}
homepage.widget.type: pihole
homepage.widget.url: https://active.home.lan:9343
homepage.widget.version: '6'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
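This Swarm version publishes port 53 over UDP only, whereas the earlier compose file also exposed 53/tcp; DNS clients fall back to TCP for truncated responses, so if that behaviour is still wanted the extra mapping would look like:

services:
  app:
    ports:
      - target: 53
        published: 53
        protocol: tcp
        mode: ingress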

View File

@@ -0,0 +1,27 @@
services:
rancher:
command: --acme-domain rancher.sectorq.eu
image: ${DOCKER_REGISTRY:-}rancher/rancher:latest
ports:
- target: 80
published: 7080
protocol: tcp
mode: ingress
- target: 443
published: 7443
protocol: tcp
mode: ingress
volumes:
- data:/var/lib/rancher
cap_add:
- ALL # add all capabilities
deploy:
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
data:

View File

@@ -21,3 +21,4 @@ services:
volumes: volumes:
- /share/docker_data/registry/auth:/auth - /share/docker_data/registry/auth:/auth
- /share/docker_registry/data:/var/lib/registry - /share/docker_registry/data:/var/lib/registry
- /share/docker_data/registry/config:/etc/docker/registry/

View File

@@ -0,0 +1,40 @@
services:
app:
environment:
REGISTRY_STORAGE_DELETE_ENABLED: 'true'
REGISTRY_AUTH: htpasswd
REGISTRY_AUTH_HTPASSWD_REALM: Registry Realm
REGISTRY_AUTH_HTPASSWD_PATH: /auth/htpasswd
image: registry:2
logging:
driver: loki
options:
loki-url: http://192.168.77.101:3100/loki/api/v1/push
loki-relabel-config: |
- action: labelmap
regex: swarm_stack
replacement: namespace
- action: labelmap
regex: swarm_(service)
ports:
- target: 5000
published: 5000
protocol: tcp
mode: ingress
volumes:
- auth:/auth
- /share/docker_registry/data:/var/lib/registry
- config:/etc/docker/registry/
deploy:
labels:
wud.watch: 'false'
wud.watch.digest: 'false'
service_name: 'registry'
replicas: 1
restart_policy:
condition: any
volumes:
auth:
config:

View File

@@ -0,0 +1,34 @@
volumes:
config:
driver: local
services:
app:
command: -c /home/appuser/regsync.yml server
env_file:
- stack.env
image: ${DOCKER_REGISTRY:-}ghcr.io/regclient/regsync:latest
logging:
driver: loki
options:
loki-url: http://192.168.77.101:3100/loki/api/v1/push
loki-relabel-config: |
- action: labelmap
regex: swarm_stack
replacement: namespace
- action: labelmap
regex: swarm_(service)
stdin_open: true
volumes:
- config:/home/appuser/
- /etc/localtime:/etc/localtime
# labels:
# service_name: 'regsync'
deploy:
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
service_name: 'regsync'
replicas: 1
placement:
constraints:
- node.role == manager

View File

@@ -4,3 +4,5 @@ HUB_USER=sectorq
HUB_PASS=dckr_pat_7XN8zNgj8JRPq1mlS5IvMcxJDUA HUB_PASS=dckr_pat_7XN8zNgj8JRPq1mlS5IvMcxJDUA
LOCAL_USER=jaydee LOCAL_USER=jaydee
LOCAL_PASS=l4c1j4yd33Du5lo LOCAL_PASS=l4c1j4yd33Du5lo
APPNAME=regsync
DOCKER_REGISTRY=r.sectorq.eu/library/

View File

@@ -10,7 +10,7 @@ services:
SEMAPHORE_ADMIN: administrator SEMAPHORE_ADMIN: administrator
SEMAPHORE_ADMIN_EMAIL: administrator@sectorq.eu SEMAPHORE_ADMIN_EMAIL: administrator@sectorq.eu
SEMAPHORE_ADMIN_NAME: administrator SEMAPHORE_ADMIN_NAME: administrator
SEMAPHORE_ADMIN_PASSWORD: $SEMAPHORE_ADMIN_PASSWORD SEMAPHORE_ADMIN_PASSWORD: administrator
SEMAPHORE_DB: semaphore_db SEMAPHORE_DB: semaphore_db
SEMAPHORE_DB_HOST: db SEMAPHORE_DB_HOST: db
SEMAPHORE_DB_PASS: StrongPassw0rd SEMAPHORE_DB_PASS: StrongPassw0rd
@@ -21,7 +21,7 @@ services:
SEMAPHORE_LDAP_DN_SEARCH: dc=sectorq,dc=eu SEMAPHORE_LDAP_DN_SEARCH: dc=sectorq,dc=eu
SEMAPHORE_LDAP_HOST: 192.168.77.101 SEMAPHORE_LDAP_HOST: 192.168.77.101
SEMAPHORE_LDAP_NEEDTLS: 'no' SEMAPHORE_LDAP_NEEDTLS: 'no'
SEMAPHORE_LDAP_PASSWORD: $LDAP_ADMIN_PASSWORD SEMAPHORE_LDAP_PASSWORD: administrator
SEMAPHORE_LDAP_PORT: '2389' SEMAPHORE_LDAP_PORT: '2389'
SEMAPHORE_LDAP_SEARCH_FILTER: (&(objectClass=inetOrgPerson)(uid=%s)) SEMAPHORE_LDAP_SEARCH_FILTER: (&(objectClass=inetOrgPerson)(uid=%s))
SEMAPHORE_PLAYBOOK_PATH: /tmp/semaphore/ SEMAPHORE_PLAYBOOK_PATH: /tmp/semaphore/

View File

@@ -0,0 +1,63 @@
services:
app:
dns:
- 192.168.77.101
- 192.168.77.1
secrets:
- semaphore_admin_password
environment:
SEMAPHORE_ACCESS_KEY_ENCRYPTION: MflCLIUF5bn6Lgkuwy4BoAdIFhoZ4Ief2oocXmuZSjs=
SEMAPHORE_ADMIN: admin
SEMAPHORE_ADMIN_EMAIL: jaydee@sectorq.eu
SEMAPHORE_ADMIN_NAME: admin
SEMAPHORE_ADMIN_PASSWORD_FILE: /run/secrets/semaphore_admin_password
SEMAPHORE_DB: semaphore_db
SEMAPHORE_DB_HOST: db
SEMAPHORE_DB_PASS: StrongPassw0rd
SEMAPHORE_DB_PORT: 3306
SEMAPHORE_DB_USER: semaphore_user
SEMAPHORE_LDAP_ACTIVATED: 'no'
SEMAPHORE_LDAP_DN_BIND: cn=jaydee,ou=users,dc=sectorq,dc=eu
SEMAPHORE_LDAP_DN_SEARCH: dc=sectorq,dc=eu
SEMAPHORE_LDAP_HOST: 192.168.77.101
SEMAPHORE_LDAP_NEEDTLS: 'no'
SEMAPHORE_LDAP_PASSWORD: administrator
SEMAPHORE_LDAP_PORT: '2389'
SEMAPHORE_LDAP_SEARCH_FILTER: (&(objectClass=inetOrgPerson)(uid=%s))
SEMAPHORE_PLAYBOOK_PATH: /tmp/semaphore/
TZ: Europe/Bratislava
image: ${DOCKER_REGISTRY:-}semaphoreui/semaphore:latest
ports:
- target: 3000
published: 3002
protocol: tcp
mode: ingress
volumes:
- /etc/localtime:/etc/localtime:ro
- data:/etc/semaphore/
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
db:
environment:
MYSQL_DATABASE: semaphore_db
MYSQL_PASSWORD: StrongPassw0rd
MYSQL_RANDOM_ROOT_PASSWORD: 'yes'
MYSQL_USER: semaphore_user
image: ${DOCKER_REGISTRY:-}mysql:8.0
volumes:
- db:/var/lib/mysql
deploy:
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
volumes:
data:
db:
secrets:
semaphore_admin_password:
external: true
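Unlike the other stacks in this set, neither service here carries a placement constraint, yet both keep their state in local named volumes, so on a multi-node swarm a rescheduled task could land on a node without that data. A minimal sketch of the constraint used elsewhere in this repository, applied to the database:

services:
  db:
    deploy:
      replicas: 1
      placement:
        constraints:
          - node.role == manager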

View File

@@ -0,0 +1,21 @@
services:
uptime-kuma:
image: ${DOCKER_REGISTRY:-}louislam/uptime-kuma:nightly2
ports:
- target: 3001
published: 3001
protocol: tcp
mode: ingress
volumes:
- data:/app/data
- /var/run/docker.sock:/var/run/docker.sock
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
restart_policy:
condition: any
volumes:
data:

View File

@@ -0,0 +1,50 @@
services:
heimdall:
environment:
PUID: '1000'
PGID: '1000'
TZ: Europe/Bratislava
image: ${DOCKER_REGISTRY:-}lscr.io/linuxserver/heimdall:latest
ports:
- target: 80
published: 8084
protocol: tcp
mode: ingress
- target: 443
published: 4437
protocol: tcp
mode: ingress
volumes:
- heimdall_config:/config
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
web:
environment:
NGINX_HOST: sectorq.eu
NGINX_PORT: '80'
image: ${DOCKER_REGISTRY:-}nginx:latest
ports:
- target: 80
published: 48000
protocol: tcp
mode: ingress
volumes:
- webhub_data:/usr/share/nginx/html
deploy:
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
heimdall_config:
webhub_data:

View File

@@ -0,0 +1,52 @@
services:
db:
image: ${DOCKER_REGISTRY:-}mariadb:10.6.4-focal
command: --default-authentication-plugin=mysql_native_password
volumes:
- db_data:/var/lib/mysql
secrets:
- wordpress_db_password
- wordpress_root_db_password
environment:
MYSQL_ROOT_PASSWORD: wordpress
MYSQL_DATABASE: wordpress
MYSQL_USER: wordpress
MYSQL_PASSWORD_FILE: /run/secrets/wordpress_db_password
MYSQL_HOST: '%'
expose:
- 3306
- 33060
deploy:
replicas: 1
placement:
constraints:
- node.role == manager
wordpress:
image: ${DOCKER_REGISTRY:-}wordpress:latest
volumes:
- wp_data:/var/www/html
ports:
- target: 80
published: 8098
protocol: tcp
mode: ingress
secrets:
- wordpress_db_password
environment:
WORDPRESS_DB_HOST: db
WORDPRESS_DB_USER: wordpress
WORDPRESS_DB_PASSWORD_FILE: /run/secrets/wordpress_db_password
WORDPRESS_DB_NAME: wordpress
deploy:
replicas: 1
placement:
constraints:
- node.role == manager
volumes:
db_data: null
wp_data: null
secrets:
wordpress_db_password:
external: true
wordpress_root_db_password:
external: true
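The db service already mounts wordpress_root_db_password, but MYSQL_ROOT_PASSWORD is still the literal string wordpress, so that secret goes unused; the official mariadb image also understands the _FILE variants, which would let the secret take over (a sketch, assuming the external secret holds the intended value):

services:
  db:
    environment:
      MYSQL_ROOT_PASSWORD_FILE: /run/secrets/wordpress_root_db_password   # replaces the hard-coded value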

View File

@@ -16,7 +16,7 @@ WUD_AUTH_OIDC_AUTHENTIK_TIMEOUT=2000
# BASIC # BASIC
WUD_AUTH_BASIC_JAYDEE_USER=homepage WUD_AUTH_BASIC_JAYDEE_USER=homepage
WUD_AUTH_BASIC_JAYDEE_HASH=$$apr1$$pGMz1QxU$$y6XuTscvGcYgas15JWlfg/ WUD_AUTH_BASIC_JAYDEE_HASH=$apr1$OZNN/65l$cQp3tMfyQdftPIgje.uDZ1
# GOTIFY # GOTIFY
WUD_TRIGGER_GOTIFY_EXTERNAL_PRIORITY=0 WUD_TRIGGER_GOTIFY_EXTERNAL_PRIORITY=0
WUD_TRIGGER_GOTIFY_EXTERNAL_TOKEN="AFxvpm1JpPSsmkf" WUD_TRIGGER_GOTIFY_EXTERNAL_TOKEN="AFxvpm1JpPSsmkf"
@@ -47,14 +47,14 @@ WUD_LOG_LEVEL=debug
# text json # text json
WUD_LOG_FORMAT=text WUD_LOG_FORMAT=text
WUD_WATCHER_EXTDOCKER_HOST=193.168.144.164 # WUD_WATCHER_EXTDOCKER_HOST=193.168.144.164
WUD_WATCHER_EXTDOCKER_PORT=2376 # WUD_WATCHER_EXTDOCKER_PORT=2376
WUD_WATCHER_EXTDOCKER_CERTFILE=/certs/ext/cert.pem # WUD_WATCHER_EXTDOCKER_CERTFILE=/certs/ext/cert.pem
WUD_WATCHER_EXTDOCKER_CAFILE=/certs/ext/ca.pem # WUD_WATCHER_EXTDOCKER_CAFILE=/certs/ext/ca.pem
WUD_WATCHER_EXTDOCKER_KEYFILE=/certs/ext/key.pem # WUD_WATCHER_EXTDOCKER_KEYFILE=/certs/ext/key.pem
WUD_WATCHER_EXTDOCKER_CRON=0 * * * * # WUD_WATCHER_EXTDOCKER_CRON=0 * * * *
WUD_WATCHER_EXTDOCKER_WATCHALL=true # WUD_WATCHER_EXTDOCKER_WATCHALL=true
WUD_WATCHER_EXTDOCKER_WATCHBYDEFAULT=true # WUD_WATCHER_EXTDOCKER_WATCHBYDEFAULT=true
WUD_WATCHER_MSERVER_HOST=192.168.77.101 WUD_WATCHER_MSERVER_HOST=192.168.77.101
WUD_WATCHER_MSERVER_PORT=2376 WUD_WATCHER_MSERVER_PORT=2376
@@ -65,14 +65,23 @@ WUD_WATCHER_MSERVER_CRON=0 * * * *
WUD_WATCHER_MSERVER_WATCHALL=true WUD_WATCHER_MSERVER_WATCHALL=true
WUD_WATCHER_MSERVER_WATCHBYDEFAULT=false WUD_WATCHER_MSERVER_WATCHBYDEFAULT=false
WUD_WATCHER_RPI5_HOST=192.168.77.238 WUD_WATCHER_MS_HOST=192.168.77.101
WUD_WATCHER_RPI5_PORT=2376 WUD_WATCHER_MS_PORT=2376
WUD_WATCHER_RPI5_CERTFILE=/certs/rpi5/cert.pem WUD_WATCHER_MS_CERTFILE=/certs/m-server/cert.pem
WUD_WATCHER_RPI5_CAFILE=/certs/rpi5/ca.pem WUD_WATCHER_MS_CAFILE=/certs/m-server/ca.pem
WUD_WATCHER_RPI5_KEYFILE=/certs/rpi5/key.pem WUD_WATCHER_MS_KEYFILE=/certs/m-server/key.pem
WUD_WATCHER_RPI5_CRON=0 * * * * WUD_WATCHER_MS_CRON=0 * * * *
WUD_WATCHER_RPI5_WATCHALL=true WUD_WATCHER_MS_WATCHALL=true
WUD_WATCHER_RPI5_WATCHBYDEFAULT=true WUD_WATCHER_MS_WATCHBYDEFAULT=true
# WUD_WATCHER_RPI5_HOST=192.168.77.238
# WUD_WATCHER_RPI5_PORT=2376
# WUD_WATCHER_RPI5_CERTFILE=/certs/rpi5/cert.pem
# WUD_WATCHER_RPI5_CAFILE=/certs/rpi5/ca.pem
# WUD_WATCHER_RPI5_KEYFILE=/certs/rpi5/key.pem
# WUD_WATCHER_RPI5_CRON=0 * * * *
# WUD_WATCHER_RPI5_WATCHALL=true
# WUD_WATCHER_RPI5_WATCHBYDEFAULT=true
WUD_WATCHER_NAS_HOST=192.168.77.106 WUD_WATCHER_NAS_HOST=192.168.77.106
WUD_WATCHER_NAS_PORT=2376 WUD_WATCHER_NAS_PORT=2376
@@ -83,13 +92,13 @@ WUD_WATCHER_NAS_CRON=0 * * * *
WUD_WATCHER_NAS_WATCHALL=true WUD_WATCHER_NAS_WATCHALL=true
WUD_WATCHER_NAS_WATCHBYDEFAULT=true WUD_WATCHER_NAS_WATCHBYDEFAULT=true
WUD_WATCHER_RACK_HOST=192.168.77.55 # WUD_WATCHER_RACK_HOST=192.168.77.55
WUD_WATCHER_RACK_PORT=2376 # WUD_WATCHER_RACK_PORT=2376
WUD_WATCHER_RACK_CERTFILE=/certs/rack/cert.pem # WUD_WATCHER_RACK_CERTFILE=/certs/rack/cert.pem
WUD_WATCHER_RACK_CAFILE=/certs/rack/ca.pem # WUD_WATCHER_RACK_CAFILE=/certs/rack/ca.pem
WUD_WATCHER_RACK_KEYFILE=/certs/rack/key.pem # WUD_WATCHER_RACK_KEYFILE=/certs/rack/key.pem
WUD_WATCHER_RACK_CRON=0 * * * * # WUD_WATCHER_RACK_CRON=0 * * * *
WUD_WATCHER_RACK_WATCHALL=true # WUD_WATCHER_RACK_WATCHALL=true
WUD_WATCHER_RACK_WATCHBYDEFAULT=true # WUD_WATCHER_RACK_WATCHBYDEFAULT=true
WUD_SERVER_CORS_ENABLED=true WUD_SERVER_CORS_ENABLED=true

__swarm/wud/wud-swarm.yml Normal file
View File

@@ -0,0 +1,41 @@
services:
app:
env_file:
- stack.env
image: ${DOCKER_REGISTRY:-}getwud/wud
# logging:
# driver: loki
# options:
# loki-url: http://192.168.77.101:3100/loki/api/v1/push
ports:
- target: 3000
published: 3008
protocol: tcp
mode: ingress
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- data:/store
- certs:/certs
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: wud_app
homepage.description: Docker container management
homepage.group: Infrastructure
homepage.href: https://wud.sectorq.eu
homepage.icon: /images/wud-logo.png
homepage.name: What's Up Docker
homepage.server: my-docker-swarm
homepage.weight: '1'
homepage.widget.password: l4c1j4yd33Du5lo
homepage.widget.type: whatsupdocker
homepage.widget.url: https://wud.sectorq.eu
homepage.widget.username: homepage
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
restart_policy:
condition: any
volumes:
data:
certs:

View File

@@ -1,2 +1,3 @@
APPNAME=zabbix APPNAME=zabbix
#RESTART=always #RESTART=always
DOCKER_REGISTRY=r.sectorq.eu/library/

View File

@@ -0,0 +1,5 @@
FROM zabbix/zabbix-server-pgsql:alpine-latest
USER root
RUN apk add --no-cache curl
USER zabbix

View File

@@ -48,7 +48,11 @@ services:
extends: extends:
file: logging.yml file: logging.yml
service: ${LOGGING:-syslog} service: ${LOGGING:-syslog}
image: ${DOCKER_REGISTRY:-}zabbix/zabbix-server-pgsql:alpine-latest #image: ${DOCKER_REGISTRY:-}zabbix/zabbix-server-pgsql:alpine-latest
build:
context: .
dockerfile: Dockerfile
image: zabbix-server-custom:latest
volumes: volumes:
- /share/docker_data/zabbix-server/server/alertscripts:/usr/lib/zabbix/alertscripts - /share/docker_data/zabbix-server/server/alertscripts:/usr/lib/zabbix/alertscripts
labels: labels:

View File

@@ -1,6 +1,6 @@
# Zabbix global # Zabbix global
# ZBX_DEBUGLEVEL=3 # ZBX_DEBUGLEVEL=3
DOCKER_REGISTRY=r.sectorq.eu/library/
# Database # Database
MYSQL_PASSWORD=zabbix MYSQL_PASSWORD=zabbix
MYSQL_USER=zabbix MYSQL_USER=zabbix
@@ -127,5 +127,5 @@ ZBX_CACHESIZE=64M
# ZBX_TLSFRONTENDCERTISSUER= # Available since 7.4.0 # ZBX_TLSFRONTENDCERTISSUER= # Available since 7.4.0
# ZBX_TLSFRONTENDCERTSUBJECT= # Available since 7.4.0 # ZBX_TLSFRONTENDCERTSUBJECT= # Available since 7.4.0
ZBX_WEBDRIVERURL=192.168.77.101:4444 # Available since 7.0.0 ZBX_WEBDRIVERURL=192.168.77.101:4444 # Available since 7.0.0
ZBX_STARTBROWSERPOLLERS=5 # Available since 7.0.0 #ZBX_STARTBROWSERPOLLERS=0 # Available since 7.0.0
# ZBX_STARTSNMPPOLLERS=1 # Available since 7.0.0 # ZBX_STARTSNMPPOLLERS=1 # Available since 7.0.0

View File

@@ -0,0 +1,133 @@
networks:
zabbix:
driver: overlay
ipam:
config:
- subnet: 192.168.89.0/28
driver: default
services:
db-server:
env_file:
- stack.env
image: ${DOCKER_REGISTRY:-}postgres:16-alpine
networks:
zabbix:
ipv4_address: 192.168.89.4
ports:
- target: 5432
published: 5432
protocol: tcp
mode: ingress
volumes:
- db:/var/lib/postgresql/data
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
zabbix-frontend:
env_file:
- stack.env
image: ${DOCKER_REGISTRY:-}zabbix/zabbix-web-nginx-pgsql:alpine-latest
networks:
zabbix:
ipv4_address: 192.168.89.3
ports:
- target: 8080
published: 8051
protocol: tcp
mode: ingress
- target: 8443
published: 4435
protocol: tcp
mode: ingress
volumes:
- certs:/usr/share/zabbix/conf/certs
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
zabbix-server:
env_file:
- stack.env
#image: ${DOCKER_REGISTRY:-}zabbix/zabbix-server-pgsql:alpine-latest
image: r.sectorq.eu/jaydee/zabbix-server-pgsql:latest
volumes:
- alertscripts:/usr/lib/zabbix/alertscripts
networks:
zabbix:
ipv4_address: 192.168.89.2
ports:
- target: 10051
published: 10051
protocol: tcp
mode: ingress
deploy:
labels:
com.centurylinklabs.watchtower.enable: 'true'
homepage.container: zabbix-server_zabbix-server
homepage.description: Monitoring server
homepage.group: Utilities
homepage.href: https://${APPNAME}.sectorq.eu
homepage.icon: ${APPNAME}.png
homepage.name: Zabbix Server
homepage.server: my-docker-swarm
homepage.weight: '90'
homepage.widget.key: 431bda3fbb45a9d603c1b74d57c3a61df1e07124c5c7119cb6379194d5555822
homepage.widget.type: ${APPNAME}
homepage.widget.url: https://${APPNAME}.sectorq.eu
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
webdriver:
ports:
- target: 4444
published: 4444
protocol: tcp
mode: ingress
- target: 7900
published: 7900
protocol: tcp
mode: ingress
image: ${DOCKER_REGISTRY:-}docker.io/selenium/standalone-chrome:latest
deploy:
labels:
wud.watch: 'true'
wud.watch.digest: 'true'
replicas: 1
placement:
constraints:
- node.role == manager
# postinstall:
# image: debian:12-slim
# environment:
# PUID: '0'
# PGID: '0'
# volumes:
# - /usr/bin:/usr/bin
# - /usr/lib:/usr/lib
# - /var/run/docker.sock:/var/run/docker.sock
# - scripts:/scripts
# entrypoint:
# - /bin/sh
# - /scripts/install-curl.sh
# deploy:
# replicas: 1
# placement:
# constraints:
# - node.role == manager
volumes:
db:
certs:
alertscripts:
scripts:

View File

@@ -117,3 +117,11 @@ volumes:
driver: local driver: local
redis: redis:
driver: local driver: local
media:
driver: local
templates:
driver: local
certs:
driver: local
custom-templates:
driver: local

View File

@@ -1,2 +1,4 @@
APPNAME=bitwarden APPNAME=bitwarden
DOCKER_REGISTRY=r.sectorq.eu/library/ DOCKER_REGISTRY=r.sectorq.eu/library/
ADMIN_PASSWORD=l4c1j4yd33Du5lo
SMTP_PASSWORD=l4c1j4yd33Du5lo

View File

@@ -1,3 +1,6 @@
volumes:
data:
driver: local
services: services:
bitwarden: bitwarden:
container_name: vaultwarden container_name: vaultwarden
@@ -29,4 +32,4 @@ services:
- 8181:80 - 8181:80
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
volumes: volumes:
- /share/docker_data/bitwarden/bw-data:/data - data:/data

View File

@@ -21,7 +21,7 @@ services:
- 6875:80 - 6875:80
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
volumes: volumes:
- /share/docker_data/bookstack/bookstack_app_data:/config - app_data:/config
db: db:
env_file: env_file:
- stack.env - stack.env
@@ -34,5 +34,9 @@ services:
wud.watch.digest: true wud.watch.digest: true
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
volumes: volumes:
- /share/docker_data/bookstack/bookstack_db_data:/config - db_data:/config
version: '2' volumes:
app_data:
driver: local
db_data:
driver: local

View File

@@ -10,5 +10,7 @@ services:
restart: unless-stopped restart: unless-stopped
volumes: volumes:
- /var/run/docker.sock:/var/run/docker.sock - /var/run/docker.sock:/var/run/docker.sock
- /share/docker_data/dockermon/config:/config - config:/config
version: '2' volumes:
config:
driver: local

View File

@@ -21,7 +21,7 @@ services:
homepage.name: Gitea homepage.name: Gitea
homepage.server: my-docker homepage.server: my-docker
homepage.weight: 1 homepage.weight: 1
homepage.widget.key: ${TOKEN} homepage.widget.key: a39e12bdd3fc724d01827b16ae6136c9229ffb16
homepage.widget.type: ${APPNAME} homepage.widget.type: ${APPNAME}
homepage.widget.url: https://${APPNAME}.sectorq.eu homepage.widget.url: https://${APPNAME}.sectorq.eu
homepage.widget.version: 2 homepage.widget.version: 2
@@ -34,22 +34,36 @@ services:
- '222:22' - '222:22'
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
volumes: volumes:
- /share/docker_data/gitea:/data - app_data:/data
- /etc/timezone:/etc/timezone:ro - /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro - /etc/localtime:/etc/localtime:ro
runner: runner:
image: ${DOCKER_REGISTRY:-}docker.io/gitea/act_runner:nightly image: ${DOCKER_REGISTRY:-}docker.io/gitea/act_runner:nightly
# secrets:
# - gitea_runner_registration_token
environment: environment:
CONFIG_FILE: /config/config.yaml CONFIG_FILE: /config/config.yaml
GITEA_INSTANCE_URL: "https://gitea.sectorq.eu/" GITEA_INSTANCE_URL: "https://gitea.sectorq.eu/"
GITEA_RUNNER_REGISTRATION_TOKEN: "8nmKqJhkvYwltmNfF2o9vs0tzo70ufHSQpVg6ymb" GITEA_RUNNER_REGISTRATION_TOKEN: "8nmKqJhkvYwltmNfF2o9vs0tzo70ufHSQpVg6ymb"
#GITEA_RUNNER_REGISTRATION_TOKEN_FILE: /srv/secrets/gitea_runner_registration_token
GITEA_RUNNER_NAME: jaydee GITEA_RUNNER_NAME: jaydee
GITEA_RUNNER_LABELS: jaydee GITEA_RUNNER_LABELS: jaydee
volumes: volumes:
- /share/docker_data/gitea-runner/config:/config - runner_config:/config
- /share/docker_data/gitea-runner/data:/data - runner_data:/data
- /var/run/docker.sock:/var/run/docker.sock - /var/run/docker.sock:/var/run/docker.sock
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
labels: labels:
wud.watch: true wud.watch: true
wud.watch.digest: true wud.watch.digest: true
volumes:
app_data:
driver: local
runner_config:
driver: local
runner_data:
driver: local
# secrets:
# gitea_runner_registration_token:
# external: true

View File

@@ -45,7 +45,6 @@ services:
- 8743:443 - 8743:443
- '8722:22' - '8722:22'
restart: unless-stopped restart: unless-stopped
shm_size: 4gb
volumes: volumes:
- /share/docker_data/gitlab/config:/etc/gitlab - /share/docker_data/gitlab/config:/etc/gitlab
- /share/docker_data/gitlab/logs:/var/log/gitlab - /share/docker_data/gitlab/logs:/var/log/gitlab

View File

@@ -1,6 +1,10 @@
name: grafana name: grafana
networks: networks:
loki: null loki: null
volumes:
loki_data:
grafana_data:
grafana_certs:
services: services:
grafana: grafana:
container_name: grafana container_name: grafana
@@ -49,11 +53,13 @@ services:
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
user: 0:0 user: 0:0
volumes: volumes:
- /share/docker_data/grafana/data:/var/lib/grafana - grafana_data:/var/lib/grafana
- /share/docker_data/grafana/certs:/certs - grafana_certs:/certs
loki: loki:
command: -config.file=/etc/loki/local-config.yaml command: -config.file=/etc/loki/local-config.yaml
image: ${DOCKER_REGISTRY:-}grafana/loki:latest image: ${DOCKER_REGISTRY:-}grafana/loki:latest
volumes:
- loki_data:/loki
labels: labels:
wud.watch: true wud.watch: true
wud.watch.digest: true wud.watch.digest: true

View File

@@ -54,7 +54,7 @@ services:
container_name: esphome container_name: esphome
image: ${DOCKER_REGISTRY:-}esphome/esphome:latest image: ${DOCKER_REGISTRY:-}esphome/esphome:latest
volumes: volumes:
- /share/docker_data/esphome/config:/config - /share/docker_data/esphome_config:/config
- /etc/localtime:/etc/localtime:ro - /etc/localtime:/etc/localtime:ro
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
privileged: true privileged: true

View File

@@ -144,7 +144,7 @@ services:
ports: ports:
- 8096:8096 - 8096:8096
- 8920:8920 - 8920:8920
- 7359:7359/udp - 7359:7359
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
volumes: volumes:
- /share/docker_data/jellyfin:/config - /share/docker_data/jellyfin:/config
@@ -154,6 +154,7 @@ services:
- /media/nas/movies:/data/movies/nas - /media/nas/movies:/data/movies/nas
- /media/nas/music:/data/music/nas - /media/nas/music:/data/music/nas
- /media/nas/shows:/data/shows/nas - /media/nas/shows:/data/shows/nas
- /media/nas/live:/data/live/nas
- /media/nas/xxx:/data/xxx/nas - /media/nas/xxx:/data/xxx/nas
jellyseerr: jellyseerr:
container_name: jellyseerr container_name: jellyseerr
@@ -248,7 +249,7 @@ services:
ports: ports:
- 8085:8085 - 8085:8085
- 6881:6881 - 6881:6881
- 6881:6881/udp - 6881:6881
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
volumes: volumes:
- /share/docker_data/qbittorrent/config:/config - /share/docker_data/qbittorrent/config:/config

View File

@@ -18,7 +18,7 @@ services:
- N8N_SECURE_COOKIE=false - N8N_SECURE_COOKIE=false
- WEBHOOK_URL=https://n8n.sectorq.eu - WEBHOOK_URL=https://n8n.sectorq.eu
volumes: volumes:
- /share/docker_data/n8n/n8n-data:/home/node/.n8n - n8n-data:/home/node/.n8n
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
stop_grace_period: 60s stop_grace_period: 60s
labels: labels:
@@ -30,4 +30,7 @@ services:
homepage.name: n8n homepage.name: n8n
homepage.server: my-docker homepage.server: my-docker
wud.watch: true wud.watch: true
wud.watch.digest: true wud.watch.digest: true
volumes:
n8n-data:
driver: local

View File

@@ -2,7 +2,7 @@ networks:
nextcloud_network: nextcloud_network:
ipam: ipam:
config: config:
- subnet: 192.168.80.0/28 - subnet: 192.168.90.0/28
driver: default driver: default
pihole_pihole: pihole_pihole:
external: true external: true

View File

@@ -33,6 +33,17 @@ services:
TZ: Europe/Bratislava TZ: Europe/Bratislava
hostname: m-server hostname: m-server
image: pihole/pihole:latest image: pihole/pihole:latest
shm_size: '256mb'
logging:
driver: loki
options:
loki-url: http://192.168.77.101:3100/loki/api/v1/push
loki-relabel-config: |
- action: labelmap
regex: swarm_stack
replacement: namespace
- action: labelmap
regex: swarm_(service)
labels: labels:
com.centurylinklabs.watchtower.enable: 'true' com.centurylinklabs.watchtower.enable: 'true'
homepage.container: pihole homepage.container: pihole
@@ -53,11 +64,15 @@ services:
pihole: pihole:
ipv4_address: 192.168.78.254 ipv4_address: 192.168.78.254
ports: ports:
- 53:53/tcp - '53:53/udp'
- 53:53/udp - '9380:80'
- 9380:80/tcp - '9343:443'
- 9343:443/tcp
restart: always restart: always
volumes: volumes:
- /share/docker_data/pihole/etc-pihole:/etc/pihole - data:/etc/pihole
- /share/docker_data/pihole/etc-dnsmasq.d:/etc/dnsmasq.d - dnsmasq_d:/etc/dnsmasq.d
volumes:
data:
driver: local
dnsmasq_d:
driver: local

View File

@@ -12,4 +12,6 @@ services:
privileged: true privileged: true
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
volumes: volumes:
- /share/docker_data/rancher:/var/lib/rancher - data:/var/lib/rancher
volumes:
data:

View File

@@ -21,3 +21,4 @@ services:
volumes: volumes:
- /share/docker_data/registry/auth:/auth - /share/docker_data/registry/auth:/auth
- /share/docker_registry/data:/var/lib/registry - /share/docker_registry/data:/var/lib/registry
- /share/docker_data/registry/config:/etc/docker/registry/

View File

@@ -1,4 +1,6 @@
name: regsync volumes:
data:
driver: local
services: services:
regsync: regsync:
command: -c /home/appuser/regsync.yml server command: -c /home/appuser/regsync.yml server
@@ -16,5 +18,5 @@ services:
restart: ${RESTART:-unless-stopped} restart: ${RESTART:-unless-stopped}
stdin_open: true stdin_open: true
volumes: volumes:
- /share/docker_data/regsync/regsync.yml:/home/appuser/regsync.yml - data:/home/appuser/
- /etc/localtime:/etc/localtime - /etc/localtime:/etc/localtime

View File

@@ -1 +0,0 @@
pyyaml

View File

@@ -19,7 +19,7 @@ WUD_AUTH_BASIC_JAYDEE_USER=homepage
WUD_AUTH_BASIC_JAYDEE_HASH=$$apr1$$pGMz1QxU$$y6XuTscvGcYgas15JWlfg/ WUD_AUTH_BASIC_JAYDEE_HASH=$$apr1$$pGMz1QxU$$y6XuTscvGcYgas15JWlfg/
# GOTIFY # GOTIFY
WUD_TRIGGER_GOTIFY_EXTERNAL_PRIORITY=0 WUD_TRIGGER_GOTIFY_EXTERNAL_PRIORITY=0
WUD_TRIGGER_GOTIFY_EXTERNAL_TOKEN="AFxvpm1JpPSsmkf" WUD_TRIGGER_GOTIFY_EXTERNAL_TOKEN="Ap-o0PU3hbTFI_."
WUD_TRIGGER_GOTIFY_EXTERNAL_URL=https://gotify.sectorq.eu WUD_TRIGGER_GOTIFY_EXTERNAL_URL=https://gotify.sectorq.eu
#WUD_TRIGGER_GOTIFY_EXTERNAL_MODE=batch #WUD_TRIGGER_GOTIFY_EXTERNAL_MODE=batch
WUD_TRIGGER_GOTIFY_EXTERNAL_MODE=simple WUD_TRIGGER_GOTIFY_EXTERNAL_MODE=simple

View File

@@ -1,121 +0,0 @@
import yaml
import sys
import copy
def default_deploy():
return {
"mode": "replicated",
"replicas": 1,
"restart_policy": {"condition": "any"},
"labels": {},
"placement": {
"constraints": [
"node.role == manager"
]
}
}
def convert_service(service):
swarm_service = {}
# Create a fresh deploy section each time (avoids YAML anchors)
deploy_section = default_deploy()
for key, value in service.items():
#print(key, value)
# Unsupported in Swarm
if key in ["container_name", "restart", "depends_on"]:
continue
# Move labels → deploy.labels
#print(f"Labels: {deploy_section['labels']}")
if key == "labels":
input(f"Key: {key} Value: {value}")
#print("Processing Labels:")
if isinstance(value, dict):
deploy_section["labels"].update({k: str(v).lower() for k, v in value.items()})
elif isinstance(value, list):
for item in value:
if "=" in item:
k, v = item.split("=", 1)
deploy_section["labels"][k] = str(v).lower()
continue
swarm_service[key] = value
# for en in swarm_service['environment']:
# #print(f"Environment Variable: {en} : {swarm_service['environment'][en]}")
# print(en)
# print(swarm_service['environment'][en])
# swarm_service['environment'][en] = str(swarm_service['environment'][en]).lower()
#print("Deploy Section:")
#print(swarm_service)
# Merge user deploy section if present
#input(service)
if "deploy" in service:
user_deploy = service["deploy"]
#print("User Deploy Section:")
# merge deploy.labels
if "labels" in user_deploy:
##print("User Deploy Labels:")
labels = user_deploy["labels"]
if isinstance(labels, dict):
deploy_section["labels"].update(labels)
elif isinstance(labels, list):
for item in labels:
#print(f"Label Item: {item}")
if "=" in item:
k, v = item.split("=", 1)
deploy_section["labels"][k] = str(v).lower()
# merge placement constraints
if "placement" in user_deploy:
if "constraints" in user_deploy["placement"]:
deploy_section["placement"]["constraints"].extend(
user_deploy["placement"]["constraints"]
)
# merge other keys
for dk, dv in user_deploy.items():
if dk not in ["labels", "placement"]:
deploy_section[dk] = copy.deepcopy(dv)
swarm_service["deploy"] = deploy_section
return swarm_service
def convert_compose_to_swarm(app):
output_file = "__swarm/" + app + "/" + app + "-swarm.yml"
input_file = app + "/docker-compose.yml"
with open(input_file, "r") as f:
compose = yaml.safe_load(f)
swarm = {"version": "3.9", "services": {}}
for name, service in compose.get("services", {}).items():
swarm["services"][name] = convert_service(service)
for section in ["networks", "volumes", "configs", "secrets"]:
if section in compose:
swarm[section] = compose[section]
# Prevent PyYAML from creating anchors
class NoAliasDumper(yaml.SafeDumper):
def ignore_aliases(self, data):
return True
with open(output_file, "w") as f:
yaml.dump(swarm, f, sort_keys=False, Dumper=NoAliasDumper)
print(f"✔ Swarm file written to: {output_file}")
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage: python convert_to_swarm.py app_name")
sys.exit(1)
convert_compose_to_swarm(sys.argv[1])

View File

@@ -1,99 +0,0 @@
import yaml
import sys
stack_name = sys.argv[1]
INPUT_FILE = f"{stack_name}/docker-compose.yml"
OUTPUT_FILE = f"__swarm/{stack_name}/{stack_name}-swarm.yml"
def convert_ports(ports):
"""Convert short port syntax to Swarm long syntax."""
result = []
for p in ports:
if isinstance(p, str):
# format: "8080:80"
pub, tgt = p.split(":")
result.append({
"target": int(tgt),
"published": int(pub),
"protocol": "tcp",
"mode": "ingress"
})
else:
result.append(p)
return result
def to_str_lower(value):
"""Convert value to string. Booleans become lowercase 'true'/'false'."""
if isinstance(value, bool):
return "true" if value else "false"
return str(value)
def env_list_to_dict(env_list):
"""Convert environment from list ['KEY=VAL'] to dict {KEY: VAL} as strings."""
env_dict = {}
for item in env_list:
key, value = item.split("=", 1)
# convert 'true'/'false' strings to lowercase
if value.lower() in ["true", "false"]:
env_dict[key] = value.lower()
else:
env_dict[key] = str(value)
return env_dict
def ensure_labels_as_string(labels):
"""Ensure all label values are strings, lowercase for booleans."""
return {k: to_str_lower(v) for k, v in labels.items()}
def convert_compose_to_swarm(data):
services = data.get("services", {})
for name, svc in services.items():
# 1) Convert environment list → dict (strings)
if "environment" in svc and isinstance(svc["environment"], list):
svc["environment"] = env_list_to_dict(svc["environment"])
# 2) Ensure deploy exists
deploy = svc.setdefault("deploy", {})
# 3) Move labels into deploy.labels, all as strings (lowercase booleans)
if "labels" in svc:
deploy.setdefault("labels", {})
if isinstance(svc["labels"], dict):
deploy["labels"].update(ensure_labels_as_string(svc["labels"]))
elif isinstance(svc["labels"], list):
for label in svc["labels"]:
key, value = label.split("=", 1)
deploy["labels"][key] = value.lower() if value.lower() in ["true", "false"] else str(value)
del svc["labels"]
# 4) Default replicas
deploy.setdefault("replicas", 1)
# 5) Add placement constraint
deploy.setdefault("placement", {})
deploy["placement"].setdefault("constraints", [])
if "node.role == manager" not in deploy["placement"]["constraints"]:
deploy["placement"]["constraints"].append("node.role == manager")
# 6) Convert ports to long format
if "ports" in svc:
svc["ports"] = convert_ports(svc["ports"])
# 7) Remove container_name (not allowed in Swarm)
svc.pop("container_name", None)
return data
def main():
with open(INPUT_FILE, "r") as f:
compose = yaml.safe_load(f)
swarm = convert_compose_to_swarm(compose)
with open(OUTPUT_FILE, "w") as f:
yaml.dump(swarm, f, sort_keys=False)
print(f"Swarm stack file written to {OUTPUT_FILE}")
if __name__ == "__main__":
main()