Compare commits


180 commits

Author SHA1 Message Date
39b11f9d26 Update image ghcr.io/siderolabs/kubelet to v1.31.2 2024-10-23 13:32:35 +00:00
4d9a8e32d7 update sonarr to work with pgb and no push secrets 2024-10-22 22:54:11 -05:00
2f275ed5b9 add lb to pgbouncer 2024-10-22 22:34:49 -05:00
f532b5416f correct db name 2024-10-22 22:18:12 -05:00
58a3d4919b correct db names 2024-10-22 22:16:57 -05:00
b4ecf685c3 revert 2024-10-22 22:07:17 -05:00
c8b07d60c7 two secrets in one 2024-10-22 22:05:10 -05:00
1bc4e9fdf3 debug 2024-10-22 21:44:34 -05:00
f821f153d7 debug 2024-10-22 21:35:05 -05:00
a63223df51 correct name 2024-10-22 21:31:25 -05:00
455218cf64 separate cluster store secrets 2024-10-22 21:29:03 -05:00
6b0cb19bfd reduce resources and move secret to new namespace 2024-10-22 21:13:34 -05:00
65de770fcc add anime ns with dedi radarr/sonarr 2024-10-22 20:57:38 -05:00
c223f3e889 add anime to trashids to sonarr 2024-10-22 18:59:37 -05:00
1e512d354c add jitter between volsync backups 2024-10-22 17:28:27 -05:00
c3443e87be Add new and improved taskfile by onedr0p 2024-10-22 16:12:55 -05:00
a070f8381b include other namespaces that include the privileged-movers label 2024-10-22 16:12:35 -05:00
f4ec9777a5 Merge pull request 'local volsync backup from s3 --> nfs' (#767) from volsync-s3-->nfs into main
Reviewed-on: #767
2024-10-22 14:18:13 -05:00
3f2c831c66 local volsync backup from s3 --> nfs 2024-10-22 14:08:21 -05:00
f87492fb84 check out new zfs dash and fix typo in dep 2024-10-22 12:40:59 -05:00
1f6a00c005 add chronyd 2024-10-22 11:26:30 -05:00
347aa68119 Merge pull request 'Update chart vector to 0.37.0' (#761) from renovate/vector into main
Reviewed-on: #761
2024-10-21 23:12:42 -05:00
7b8ed73b92 Merge pull request 'Update image docker.io/ollama/ollama to v0.3.14' (#760) from renovate/docker.io-ollama-ollama-0.x into main
Reviewed-on: #760
2024-10-21 23:11:28 -05:00
e6878640ee Merge pull request 'Update image docker.io/excalidraw/excalidraw to 687708a' (#762) from renovate/docker.io-excalidraw-excalidraw-latest into main
Reviewed-on: #762
2024-10-21 23:10:40 -05:00
a696c83d44 Merge pull request 'Update image 1337kavin/piped-proxy to 47cf993' (#763) from renovate/1337kavin-piped-proxy-latest into main
Reviewed-on: #763
2024-10-21 23:10:30 -05:00
67c92a6f2d Merge pull request 'Update image quay.io/redlib/redlib to a3f1eca' (#764) from renovate/quay.io-redlib-redlib-latest into main
Reviewed-on: #764
2024-10-21 23:10:19 -05:00
f5c5ab732d Merge pull request 'Update image ghcr.io/onedr0p/prowlarr-develop to v1.25.4.4818' (#765) from renovate/ghcr.io-onedr0p-prowlarr-develop-1.x into main
Reviewed-on: #765
2024-10-21 23:10:08 -05:00
eb43276149 Update image ghcr.io/onedr0p/prowlarr-develop to v1.25.4.4818 2024-10-22 03:32:31 +00:00
cfcb7ebe19 Update image quay.io/redlib/redlib to a3f1eca 2024-10-22 01:02:48 +00:00
e0fd822691 revert zfs dashboard 2024-10-21 19:30:43 -05:00
7e91167a4c debug 2024-10-21 19:28:30 -05:00
3849281f09 duplicate dashboard 2024-10-21 19:24:19 -05:00
c4c03d75fb test zfs graph 2024-10-21 19:18:32 -05:00
632d260a2e deploy vm scrape config 2024-10-21 19:17:02 -05:00
7b86253515 add zfs-exporter scrape config 2024-10-21 19:15:38 -05:00
2c8cc873a7 Update image 1337kavin/piped-proxy to 47cf993 2024-10-21 20:32:32 +00:00
36ec208d8d Update image docker.io/excalidraw/excalidraw to 687708a 2024-10-21 19:02:35 +00:00
70fd6f6d3d add crunchy postgres dashboards 2024-10-21 13:19:06 -05:00
a786069bcc add monitoring to crunchy postgres 2024-10-21 13:12:49 -05:00
c2a8a165e0 Update chart vector to 0.37.0 2024-10-21 18:02:32 +00:00
be0f0bc52e add grafana 2024-10-21 11:46:13 -05:00
09baa7cf09 dep on hr not ks 2024-10-21 10:40:48 -05:00
d9c56177da correct dependency 2024-10-21 10:39:34 -05:00
7f9e4a2504 add unpoller 2024-10-21 10:37:53 -05:00
ac4282a6c5 add descheduler 2024-10-21 09:18:53 -05:00
93d34a811b Update image docker.io/ollama/ollama to v0.3.14 2024-10-21 03:02:31 +00:00
fe1088b239 Merge pull request 'Update image ghcr.io/cross-seed/cross-seed to v6.0.0-43' (#754) from renovate/ghcr.io-cross-seed-cross-seed-6.x into main
Reviewed-on: #754
2024-10-20 19:35:28 -05:00
646583d2aa Merge pull request 'Update image ghcr.io/onedr0p/prowlarr-develop to v1.25.3.4815' (#755) from renovate/ghcr.io-onedr0p-prowlarr-develop-1.x into main
Reviewed-on: #755
2024-10-20 19:35:19 -05:00
ef47408716 Merge pull request 'Update image ghcr.io/onedr0p/radarr-develop to v5.13.0.9361' (#756) from renovate/ghcr.io-onedr0p-radarr-develop-5.x into main
Reviewed-on: #756
2024-10-20 19:35:12 -05:00
aadd06696e Merge pull request 'Update image ghcr.io/autobrr/autobrr to v1.48.0' (#757) from renovate/ghcr.io-autobrr-autobrr-1.x into main
Reviewed-on: #757
2024-10-20 19:35:00 -05:00
19ca047ca1 Merge pull request 'Update image 1337kavin/piped-proxy to af08706' (#758) from renovate/1337kavin-piped-proxy-latest into main
Reviewed-on: #758
2024-10-20 19:34:07 -05:00
7a94a4f790 Merge pull request 'Update image ghcr.io/grafana/helm-charts/grafana to v8.5.8' (#759) from renovate/grafana-monorepo into main
Reviewed-on: #759
2024-10-20 19:33:54 -05:00
b1820e8aea Update image ghcr.io/grafana/helm-charts/grafana to v8.5.8 2024-10-21 00:32:41 +00:00
62b671125f Update image 1337kavin/piped-proxy to af08706 2024-10-21 00:32:33 +00:00
c6de327c75 add grafana oci repo 2024-10-20 19:20:01 -05:00
a2414374df Update image ghcr.io/autobrr/autobrr to v1.48.0 2024-10-20 21:32:35 +00:00
4d18b83b81 Update image ghcr.io/onedr0p/sabnzbd to 86c645d 2024-10-20 20:32:32 +00:00
6966e74fa6 add node-exporter 2024-10-20 12:07:53 -05:00
d3692d298a add oci prom-community 2024-10-20 12:06:01 -05:00
22fbadfade Update image ghcr.io/onedr0p/radarr-develop to v5.13.0.9361 2024-10-20 12:32:40 +00:00
91694926a7 Update image ghcr.io/onedr0p/prowlarr-develop to v1.25.3.4815 2024-10-20 12:32:34 +00:00
e8892fa22a Update image ghcr.io/cross-seed/cross-seed to v6.0.0-43 2024-10-20 05:02:35 +00:00
e3167d2370 add vector-agent 2024-10-19 19:49:25 -05:00
1594910dd2 update resource path 2024-10-19 18:41:01 -05:00
9a8e6b6291 nice 2024-10-19 18:38:24 -05:00
84ed40827f test 2024-10-19 18:37:13 -05:00
b6dc54e192 reloader to operator 2024-10-19 18:35:21 -05:00
9384099003 add vector-aggregator 2024-10-19 18:21:31 -05:00
3dd13b051c remove dangling comma 2024-10-19 18:12:32 -05:00
aada4fd258 debug 2024-10-19 17:52:38 -05:00
86a9bfd1cd revert 2024-10-19 17:38:50 -05:00
612714efd6 comments not supported 2024-10-19 17:01:14 -05:00
845443720d revert and disable acl in conf 2024-10-19 16:59:59 -05:00
e5a944afb8 remove acl until I can correct the rules. 2024-10-19 16:54:06 -05:00
ebbbbbc33c Merge pull request 'Update image ghcr.io/mendhak/http-https-echo to v35' (#752) from renovate/ghcr.io-mendhak-http-https-echo-35.x into main
Reviewed-on: #752
2024-10-19 16:43:33 -05:00
b096523f33 Merge pull request 'Update image ghcr.io/bjw-s-labs/piped-frontend to v2024.10.19' (#753) from renovate/ghcr.io-bjw-s-labs-piped-frontend-2024.x into main
Reviewed-on: #753
2024-10-19 16:43:20 -05:00
848f1a545a standardize app name label 2024-10-19 16:38:02 -05:00
9e567aac0d Update image ghcr.io/bjw-s-labs/piped-frontend to v2024.10.19 2024-10-19 18:32:30 +00:00
767b4624d8 Update image ghcr.io/mendhak/http-https-echo to v35 2024-10-19 16:02:33 +00:00
be4818bbb7 update helmrepository schema and add vector chart repo 2024-10-18 18:53:42 -05:00
4a4089be7d update ingress 2024-10-18 17:23:55 -05:00
fba72f86ef correct repo 2024-10-18 17:19:02 -05:00
cddce8ed5b add victoria logs 2024-10-18 17:18:23 -05:00
f5597e33c7 update ratios 2024-10-18 14:34:24 -05:00
b54324d594 Merge pull request 'Update image ghcr.io/onedr0p/home-assistant to v2024.10.3' (#751) from renovate/ghcr.io-onedr0p-home-assistant-2024.x into main
Reviewed-on: #751
2024-10-18 11:42:04 -05:00
43d9ce9593 Update image ghcr.io/onedr0p/home-assistant to v2024.10.3 2024-10-18 16:32:33 +00:00
82cf626562 silence clock skew 2024-10-17 21:32:33 -05:00
23f4d92e50 Merge pull request 'Update image ghcr.io/dragonflydb/dragonfly to v1.24.0' (#742) from renovate/ghcr.io-dragonflydb-dragonfly-1.x into main
Reviewed-on: #742
2024-10-17 21:15:07 -05:00
7861e27b6b Merge pull request 'Update image 1337kavin/piped-proxy to 5d069df' (#749) from renovate/1337kavin-piped-proxy-latest into main
Reviewed-on: #749
2024-10-17 21:14:33 -05:00
2a3e7139fc Merge pull request 'Update image pgo to v5.7.0' (#746) from renovate/pgo-5.x into main
Reviewed-on: #746
2024-10-17 21:14:20 -05:00
c5b36d8ce2 up to two replicas 2024-10-17 20:59:00 -05:00
b48e4a02c2 Merge pull request 'Update chart emqx-operator to 2.2.25' (#744) from renovate/emqx-operator-2.x into main
Reviewed-on: #744
2024-10-17 20:53:49 -05:00
0cf1087754 Merge pull request 'Update image ghcr.io/zwave-js/zwave-js-ui to v9.24.0' (#747) from renovate/ghcr.io-zwave-js-zwave-js-ui-9.x into main
Reviewed-on: #747
2024-10-17 20:53:39 -05:00
4214515c6a Merge pull request 'Update image ghcr.io/koush/scrypted to v0.121.0' (#748) from renovate/ghcr.io-koush-scrypted-0.x into main
Reviewed-on: #748
2024-10-17 20:47:29 -05:00
acc8c0b920 add bjw-s-labs to automerge digests 2024-10-17 20:45:29 -05:00
5cde98f529 refactor security context and add proper version to piped frontend 2024-10-17 20:42:10 -05:00
dde7c66b70 Merge pull request 'Update Rook Ceph group to v1.15.4 (patch)' (#750) from renovate/patch-rook-ceph into main
Reviewed-on: #750
2024-10-17 20:20:11 -05:00
acf7bc72f0 Update Rook Ceph group to v1.15.4 2024-10-17 21:03:18 +00:00
302bdd77b2 Update image 1337kavin/piped-proxy to 5d069df 2024-10-17 21:03:14 +00:00
cf3cd03a04 Update image ghcr.io/onedr0p/home-assistant to 65cdf47 2024-10-17 20:03:01 +00:00
368dfad63f Update image ghcr.io/koush/scrypted to v0.121.0 2024-10-17 19:33:02 +00:00
82eb531702 Update image ghcr.io/zwave-js/zwave-js-ui to v9.24.0 2024-10-17 14:03:14 +00:00
867f6a97ea Update image pgo to v5.7.0 2024-10-17 13:33:33 +00:00
db791c40c3 Update chart emqx-operator to 2.2.25 2024-10-17 09:02:38 +00:00
f65c3bb2b6 Merge pull request 'Update image ghcr.io/cross-seed/cross-seed to v6.0.0-42' (#738) from renovate/ghcr.io-cross-seed-cross-seed-6.x into main
Reviewed-on: #738
2024-10-16 17:22:15 -05:00
0582ccd81d Merge pull request 'Update chart node-feature-discovery to 0.16.5' (#739) from renovate/node-feature-discovery-0.x into main
Reviewed-on: #739
2024-10-16 17:21:56 -05:00
16b79d9447 Merge pull request 'Update image 1337kavin/piped-proxy to 38e511b' (#743) from renovate/1337kavin-piped-proxy-latest into main
Reviewed-on: #743
2024-10-16 17:18:49 -05:00
bc7e3294df Disk keeps swapping on boot. Since it's the only hdd /dev/sda will do. 2024-10-16 17:09:13 -05:00
68cdf5531e Update image 1337kavin/piped-proxy to 38e511b 2024-10-16 19:02:53 +00:00
dbb62d28eb Update image ghcr.io/dragonflydb/dragonfly to v1.24.0 2024-10-16 06:02:54 +00:00
0bf3a2e727 Merge pull request 'Update chart cilium to 1.16.3' (#741) from renovate/patch-cilium into main
Reviewed-on: #741
2024-10-15 06:49:36 -05:00
362ffcdccc Merge pull request 'Update chart kubelet-csr-approver to 1.2.3' (#740) from renovate/kubelet-csr-approver-1.x into main
Reviewed-on: #740
2024-10-15 06:48:51 -05:00
83e86b4b23 Update chart kubelet-csr-approver to 1.2.3 2024-10-15 11:38:17 +00:00
75a288c381 Merge pull request 'Update image ghcr.io/zwave-js/zwave-js-ui to v9.23.0' (#737) from renovate/ghcr.io-zwave-js-zwave-js-ui-9.x into main
Reviewed-on: #737
2024-10-15 06:35:25 -05:00
5e23e0fddd Merge pull request 'Update image app-template to v3.5.1' (#735) from renovate/app-template-3.x into main
Reviewed-on: #735
2024-10-15 06:35:12 -05:00
857d5f9f25 Merge pull request 'Update image public.ecr.aws/emqx/emqx to v5.8.1' (#733) from renovate/public.ecr.aws-emqx-emqx-5.x into main
Reviewed-on: #733
2024-10-15 06:13:46 -05:00
be59ac6eb6 Merge pull request 'Update image spegel to v0.0.27' (#734) from renovate/spegel-0.x into main
Reviewed-on: #734
2024-10-15 06:11:53 -05:00
491639f911 Update chart cilium to 1.16.3 2024-10-15 09:33:02 +00:00
5e8a66dbf9 Update chart node-feature-discovery to 0.16.5 2024-10-15 06:32:17 +00:00
af1d0827c1 Update image ghcr.io/cross-seed/cross-seed to v6.0.0-42 2024-10-15 01:02:42 +00:00
11470b3ddd unbreak json 2024-10-14 12:10:57 -05:00
f0d3933cd0 revert talosconfig change
revert Merge pull request 'add iot vlan to each talos node' (#736) from add-iot-interface-to-all-talos-nodes into main

Reviewed-on: #736
2024-10-14 11:36:37 -05:00
21394584fe Update image ghcr.io/zwave-js/zwave-js-ui to v9.23.0 2024-10-14 16:32:37 +00:00
eb0eacf99f Merge pull request 'add iot vlan to each talos node' (#736) from add-iot-interface-to-all-talos-nodes into main
Reviewed-on: #736
2024-10-14 11:11:23 -05:00
94bee873e7 add iot vlan to each talos node 2024-10-14 11:10:58 -05:00
a567eda576 Update image app-template to v3.5.1 2024-10-14 16:02:26 +00:00
d6199e8db2 Update image spegel to v0.0.27 2024-10-14 15:02:56 +00:00
4154700932 Update image public.ecr.aws/emqx/emqx to v5.8.1 2024-10-14 15:02:45 +00:00
279096ea11 Merge pull request 'update helmrelease schemas' (#732) from update-helmrelease-schemas into main
Reviewed-on: #732
2024-10-14 09:29:54 -05:00
9fed1b350f update helmrelease schemas 2024-10-14 09:26:12 -05:00
e45976ebf1 Merge pull request 'Update HelmRelease to helm.toolkit.fluxcd.io/v2' (#725) from renovate/helmrelease-2.x into main
Reviewed-on: #725
2024-10-14 09:12:03 -05:00
19c1d0d618 Merge pull request 'Update image ghcr.io/onedr0p/prowlarr-develop to v1.25.2.4794' (#730) from renovate/ghcr.io-onedr0p-prowlarr-develop-1.x into main
Reviewed-on: #730
2024-10-14 09:05:33 -05:00
5e51ebcb9c Merge pull request 'Update image ghcr.io/tautulli/tautulli to v2.14.6' (#731) from renovate/ghcr.io-tautulli-tautulli-2.x into main
Reviewed-on: #731
2024-10-14 09:05:13 -05:00
3c8e5baa7b Update image ghcr.io/tautulli/tautulli to v2.14.6 2024-10-14 14:04:22 +00:00
7cfc65d647 Update image ghcr.io/onedr0p/prowlarr-develop to v1.25.2.4794 2024-10-14 14:04:17 +00:00
b0063fe8c4 Merge pull request 'Update image ghcr.io/cross-seed/cross-seed to v6.0.0-41' (#728) from renovate/ghcr.io-cross-seed-cross-seed-6.x into main
Reviewed-on: #728
2024-10-14 09:01:06 -05:00
9e94135f55 Merge pull request 'Update image ghcr.io/koush/scrypted to v0.119.2' (#729) from renovate/ghcr.io-koush-scrypted-0.x into main
Reviewed-on: #729
2024-10-14 08:43:36 -05:00
6abe2b9c4b Update image ghcr.io/koush/scrypted to v0.119.2 2024-10-14 13:04:05 +00:00
fd8eb9cf19 Update image ghcr.io/cross-seed/cross-seed to v6.0.0-41 2024-10-14 13:03:57 +00:00
fbe5c55308 Merge pull request 'Update image 1337kavin/piped-proxy to c497d70' (#726) from renovate/1337kavin-piped-proxy-latest into main
Reviewed-on: #726
2024-10-14 07:45:37 -05:00
9a0afa2aa4 Merge pull request 'Update image ghcr.io/buroa/qbtools to v0.19.3' (#727) from renovate/ghcr.io-buroa-qbtools-0.x into main
Reviewed-on: #727
2024-10-14 07:45:29 -05:00
877380899e Merge pull request 'Update chart ingress-nginx to 4.11.3' (#710) from renovate/ingress-nginx-4.x into main
Reviewed-on: #710
2024-10-14 07:45:02 -05:00
93afdb3fe7 Merge pull request 'Update image docker.io/cloudflare/cloudflared to v2024.10.0' (#713) from renovate/docker.io-cloudflare-cloudflared-2024.x into main
Reviewed-on: #713
2024-10-14 07:43:43 -05:00
d85993b354 Merge pull request 'Update image docker.io/ollama/ollama to v0.3.13' (#717) from renovate/docker.io-ollama-ollama-0.x into main
Reviewed-on: #717
2024-10-14 07:43:29 -05:00
750b19f1e5 Update image ghcr.io/buroa/qbtools to v0.19.3 2024-10-14 12:33:17 +00:00
5f034598a9 Update image 1337kavin/piped-proxy to c497d70 2024-10-14 12:33:07 +00:00
6aea997c48 Merge pull request 'Update image ghcr.io/zwave-js/zwave-js-ui to v9.22.0' (#718) from renovate/ghcr.io-zwave-js-zwave-js-ui-9.x into main
Reviewed-on: #718
2024-10-14 07:30:07 -05:00
c05674b76b Merge pull request 'Update image quay.io/redlib/redlib to f07a153' (#716) from renovate/quay.io-redlib-redlib-latest into main
Reviewed-on: #716
2024-10-14 07:29:47 -05:00
28d581634d Merge pull request 'Update image ghcr.io/onedr0p/radarr-develop to v5.12.2.9335' (#712) from renovate/ghcr.io-onedr0p-radarr-develop-5.x into main
Reviewed-on: #712
2024-10-14 07:29:37 -05:00
5e8add9c86 Merge pull request 'Update image ghcr.io/bjw-s-labs/piped-frontend to 07ab90a' (#714) from renovate/ghcr.io-bjw-s-labs-piped-frontend-latest into main
Reviewed-on: #714
2024-10-14 07:27:49 -05:00
338004fa0a Update image ghcr.io/bjw-s-labs/piped-frontend to 07ab90a 2024-10-13 16:35:09 +00:00
a7b8662796 include bond interfaces 2024-10-12 17:11:06 -05:00
76e7901a2f update ip 2024-10-12 16:07:26 -05:00
4f604ba608 debug 2024-10-12 16:00:37 -05:00
de94de0b2a add ports 2024-10-12 15:45:17 -05:00
4bc53661ad remove extra service 2024-10-12 14:27:34 -05:00
f1fdda6bdd gatus fixup -- remove services/servers not used 2024-10-12 14:24:54 -05:00
6aaf58e8be Update HelmRelease to helm.toolkit.fluxcd.io/v2 2024-10-12 17:03:18 +00:00
ae41bd8a6e Merge pull request 'Update image ghcr.io/onedr0p/home-assistant to v2024.10.2' (#724) from renovate/ghcr.io-onedr0p-home-assistant-2024.x into main
Reviewed-on: #724
2024-10-12 11:35:28 -05:00
a894c9932b Update image ghcr.io/onedr0p/radarr-develop to v5.12.2.9335 2024-10-12 16:33:17 +00:00
134cc34515 Update image quay.io/redlib/redlib to f07a153 2024-10-12 16:03:04 +00:00
52a4fc077b add rebroadcast ports and swap to nvidia container. 2024-10-12 07:16:22 -05:00
5051f5b6f4 update mount names 2024-10-12 06:42:46 -05:00
587565c0ed correct pvc name 2024-10-11 20:06:09 -05:00
ba526c130b correct namespace 2024-10-11 19:58:29 -05:00
c7037694fa add scrypted 2024-10-11 19:57:15 -05:00
45d91c392d update toolpod 2024-10-11 19:54:48 -05:00
acba2f290f swap deps 2024-10-11 18:47:50 -05:00
aa7119a6e4 add alert manager silencer back 2024-10-11 18:44:08 -05:00
b56314020a remove retryInterval on all flux 2024-10-11 18:25:37 -05:00
d67ed006ca update to talos v1.8.1 2024-10-11 18:12:51 -05:00
d0d86351c1 add google coral detection 2024-10-11 18:12:26 -05:00
1ee483d322 Update image ghcr.io/onedr0p/home-assistant to v2024.10.2 2024-10-11 17:33:38 +00:00
efb553e50b remove old code -- different hardware now supported. 2024-10-11 12:17:21 -05:00
487976e388 {}!!!!!!!!! 2024-10-11 10:53:39 -05:00
7c8802e3bf remove pod binding policy filters for schematic-to-pod policy to work 2024-10-11 10:31:13 -05:00
e906b8239d Update image ghcr.io/zwave-js/zwave-js-ui to v9.22.0 2024-10-11 10:03:26 +00:00
e6b1302167 Update image docker.io/ollama/ollama to v0.3.13 2024-10-10 23:05:34 +00:00
4ec7a417e7 Update image docker.io/cloudflare/cloudflared to v2024.10.0 2024-10-10 12:03:51 +00:00
fdc61be74e Update chart ingress-nginx to 4.11.3 2024-10-10 11:01:05 +00:00
218 changed files with 6716 additions and 606 deletions

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:

View file

@ -20,7 +20,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -3,9 +3,12 @@
apiVersion: kustomize.toolkit.fluxcd.io/v1
kind: Kustomization
metadata:
name: vault
name: &app vault
namespace: flux-system
spec:
commonMetadata:
labels:
app.kubernetes.io/name: *app
interval: 1m
path: "./kubernetes/apps/security/vault/app"
prune: true

View file

@ -7,7 +7,7 @@
"automerge": true,
"automergeType": "branch",
"matchUpdateTypes": ["digest"],
"matchPackagePrefixes": ["ghcr.io/onedr0p", "ghcr.io/bjw-s"],
"matchPackagePrefixes": ["ghcr.io/onedr0p", "ghcr.io/bjw-s", "ghcr.io/bjw-s-labs"],
"ignoreTests": true
},
{

View file

@ -1,6 +1,6 @@
---
# yaml-language-server: $schema=https://taskfile.dev/schema.json
version: "3"
version: '3'
# This taskfile is used to manage certain VolSync tasks for a given application, limitations are described below.
# 1. Fluxtomization, HelmRelease, PVC, ReplicationSource all have the same name (e.g. plex)
@ -8,215 +8,129 @@ version: "3"
# 3. Applications are deployed as either a Kubernetes Deployment or StatefulSet
# 4. Each application only has one PVC that is being replicated
x-env-vars: &env-vars
app: "{{.app}}"
claim: "{{.claim}}"
controller: "{{.controller}}"
job: "{{.job}}"
ns: "{{.ns}}"
pgid: "{{.pgid}}"
previous: "{{.previous}}"
puid: "{{.puid}}"
vars:
VOLSYNC_RESOURCES_DIR: "{{.ROOT_DIR}}/.taskfiles/volsync/resources"
VOLSYNC_RESOURCES_DIR: '{{.ROOT_DIR}}/.taskfiles/volsync/resources'
tasks:
state-*:
desc: Suspend or Resume Volsync
summary: |
state: resume or suspend (required)
dotenv: ['{{.VOLSYNC_RESOURCES_DIR}}/.env']
summary: |-
CLUSTER: Cluster to run command against (default: main)
STATE: resume or suspend (required)
cmds:
- flux --context $CLUSTER {{.state}} kustomization volsync
- flux --context $CLUSTER -n {{.ns}} {{.state}} helmrelease volsync
- kubectl --context $CLUSTER -n {{.ns}} scale deployment volsync --replicas {{if eq "suspend" .state}}0{{else}}1{{end}}
env: *env-vars
# - until kubectl wait jobs --all --all-namespaces --for=condition=complete --timeout=5m &>/dev/null; do sleep 5; done
- flux {{.STATE}} kustomization volsync
- flux --namespace {{.NS}} {{.STATE}} helmrelease volsync
- kubectl --namespace {{.NS}} scale deployment --all --replicas {{if eq .STATE "suspend"}}0{{else}}1{{end}}
vars:
ns: '{{.ns | default "volsync-system"}}'
state: '{{index .MATCH 0}}'
list:
desc: List snapshots for an application
summary: |
ns: Namespace the PVC is in (default: default)
app: Application to list snapshots for (required)
dotenv: ['{{.VOLSYNC_RESOURCES_DIR}}/.env']
cmds:
- /etc/profiles/per-user/jahanson/bin/envsubst < <(cat {{.VOLSYNC_RESOURCES_DIR}}/list.tmpl.yaml) | kubectl --context $CLUSTER apply -f -
- bash {{.VOLSYNC_RESOURCES_DIR}}/wait-for-job.sh {{.job}} {{.ns}} $CLUSTER
- kubectl --context $CLUSTER -n {{.ns}} wait job/{{.job}} --for condition=complete --timeout=1m
- kubectl --context $CLUSTER -n {{.ns}} logs job/{{.job}} --container main
- kubectl --context $CLUSTER -n {{.ns}} delete job {{.job}}
env: *env-vars
NS: '{{.NS | default "volsync-system"}}'
STATE: '{{index .MATCH 0}}'
requires:
vars: ["app"]
vars:
ns: '{{.ns | default "default"}}'
job: volsync-list-{{.app}}
preconditions:
- test -f /etc/profiles/per-user/jahanson/bin/envsubst
- test -f {{.VOLSYNC_RESOURCES_DIR}}/wait-for-job.sh
- test -f {{.VOLSYNC_RESOURCES_DIR}}/list.tmpl.yaml
silent: true
vars: [CLUSTER]
unlock:
desc: Unlock a Restic repository for an application
summary: |
ns: Namespace the PVC is in (default: default)
app: Application to unlock (required)
dotenv: ['{{.VOLSYNC_RESOURCES_DIR}}/.env']
cmds:
- /etc/profiles/per-user/jahanson/bin/envsubst < <(cat {{.VOLSYNC_RESOURCES_DIR}}/unlock.tmpl.yaml) | kubectl --context $CLUSTER apply -f -
- bash {{.VOLSYNC_RESOURCES_DIR}}/wait-for-job.sh {{.job}} {{.ns}} $CLUSTER
- kubectl --context $CLUSTER -n {{.ns}} wait job/{{.job}} --for condition=complete --timeout=1m
- kubectl --context $CLUSTER -n {{.ns}} logs job/{{.job}} --container minio
- kubectl --context $CLUSTER -n {{.ns}} logs job/{{.job}} --container r2
- kubectl --context $CLUSTER -n {{.ns}} delete job {{.job}}
env: *env-vars
desc: Unlock all Restic repositories
summary: |-
CLUSTER: Cluster to run command against (default: main)
cmd: >
kubectl get replicationsources --all-namespaces --no-headers -A | awk '{print $1, $2}'
| xargs --max-procs=2 -l bash -c 'kubectl --namespace "$0" patch --field-manager=flux-client-side-apply replicationsources "$1" --type merge --patch "{\"spec\":{\"restic\":{\"unlock\":\"{{now | unixEpoch}}\"}}}"'
requires:
vars: ["app"]
vars:
ns: '{{.ns | default "default"}}'
job: volsync-unlock-{{.app}}
preconditions:
- test -f /etc/profiles/per-user/jahanson/bin/envsubst
- test -f {{.VOLSYNC_RESOURCES_DIR}}/wait-for-job.sh
- test -f {{.VOLSYNC_RESOURCES_DIR}}/unlock.tmpl.yaml
silent: true
vars: [CLUSTER]
# To run backup jobs in parallel for all replicationsources:
# - kubectl get replicationsources --all-namespaces --no-headers | awk '{print $2, $1}' | xargs --max-procs=4 -l bash -c 'task volsync:snapshot app=$0 ns=$1'
# - kubectl get replicationsources --all-namespaces --no-headers | awk '{print $2, $1}' | xargs --max-procs=4 -l bash -c 'task volsync:snapshot APP=$0 NS=$1'
snapshot:
desc: Snapshot a PVC for an application
summary: |
cluster: Cluster to run command against (required)
ns: Namespace the PVC is in (default: default)
app: Application to snapshot (required)
desc: Snapshot an application
summary: |-
CLUSTER: Cluster to run command against (default: main)
NS: Namespace the application is in (default: default)
APP: Application to snapshot (required)
cmds:
- kubectl --context {{.cluster}} -n {{.ns}} patch replicationsources {{.app}} --type merge -p '{"spec":{"trigger":{"manual":"{{.now}}"}}}'
- bash {{.VOLSYNC_RESOURCES_DIR}}/wait-for-job.sh {{.job}} {{.ns}} {{.cluster}}
- kubectl --context {{.cluster}} -n {{.ns}} wait job/{{.job}} --for condition=complete --timeout=120m
env: *env-vars
requires:
vars: ["cluster", "app"]
- kubectl --namespace {{.NS}} patch replicationsources {{.APP}} --type merge -p '{"spec":{"trigger":{"manual":"{{now | unixEpoch}}"}}}'
- until kubectl --namespace {{.NS}} get job/{{.JOB}} &>/dev/null; do sleep 5; done
- kubectl --namespace {{.NS}} wait job/{{.JOB}} --for=condition=complete --timeout=120m
vars:
now: '{{now | date "150405"}}'
ns: '{{.ns | default "default"}}'
job: volsync-src-{{.app}}
controller:
sh: true && {{.VOLSYNC_RESOURCES_DIR}}/which-controller.sh {{.app}} {{.ns}} {{.cluster}}
NS: '{{.NS | default "default"}}'
JOB: volsync-src-{{.APP}}
requires:
vars: [CLUSTER, APP]
preconditions:
- test -f {{.VOLSYNC_RESOURCES_DIR}}/which-controller.sh
- test -f {{.VOLSYNC_RESOURCES_DIR}}/wait-for-job.sh
- kubectl --context {{.cluster}} -n {{.ns}} get replicationsources {{.app}}
- kubectl --namespace {{.NS}} get replicationsources {{.APP}}
# To run restore jobs in parallel for all replicationdestinations:
# - kubectl get replicationsources --all-namespaces --no-headers | awk '{print $2, $1}' | xargs --max-procs=4 -l bash -c 'task volsync:restore app=$0 ns=$1'
# - kubectl get replicationsources --all-namespaces --no-headers | awk '{print $2, $1}' | xargs --max-procs=4 -l bash -c 'task volsync:restore APP=$0 NS=$1'
restore:
desc: Restore a PVC for an application
summary: |
cluster: Cluster to run command against (required)
ns: Namespace the PVC is in (default: default)
app: Application to restore (required)
previous: Previous number of snapshots to restore (default: 2)
desc: Restore an application
summary: |-
CLUSTER: Cluster to run command against (default: main)
NS: Namespace the application is in (default: default)
APP: Application to restore (required)
PREVIOUS: Previous number of snapshots to restore (default: 2)
cmds:
- { task: .suspend, vars: *env-vars }
- { task: .wipe, vars: *env-vars }
- { task: .restore, vars: *env-vars }
- { task: .resume, vars: *env-vars }
env: *env-vars
- task: .suspend
- task: .restore
- task: .resume
requires:
vars: ["cluster", "app"]
vars:
ns: '{{.ns | default "default"}}'
previous: '{{.previous | default 2}}'
controller:
sh: "{{.VOLSYNC_RESOURCES_DIR}}/which-controller.sh {{.app}} {{.ns}}"
claim:
sh: kubectl --context {{.cluster}} -n {{.ns}} get replicationsources/{{.app}} -o jsonpath="{.spec.sourcePVC}"
puid:
sh: kubectl --context {{.cluster}} -n {{.ns}} get replicationsources/{{.app}} -o jsonpath="{.spec.restic.moverSecurityContext.runAsUser}"
pgid:
sh: kubectl --context {{.cluster}} -n {{.ns}} get replicationsources/{{.app}} -o jsonpath="{.spec.restic.moverSecurityContext.runAsGroup}"
preconditions:
- test -f {{.VOLSYNC_RESOURCES_DIR}}/which-controller.sh
vars: [CLUSTER, APP]
cleanup:
desc: Delete volume populator PVCs in all namespaces
summary: |
cluster: Cluster to run command against (required)
cmds:
- for: { var: dest }
cmd: |
{{- $items := (split "/" .ITEM) }}
kubectl --context {{.cluster}} delete pvc -n {{ $items._0 }} {{ $items._1 }}
- for: { var: cache }
cmd: |
{{- $items := (split "/" .ITEM) }}
kubectl --context {{.cluster}} delete pvc -n {{ $items._0 }} {{ $items._1 }}
- for: { var: snaps }
cmd: |
{{- $items := (split "/" .ITEM) }}
kubectl --context {{.cluster}} delete volumesnapshot -n {{ $items._0 }} {{ $items._1 }}
env: *env-vars
requires:
vars: ["cluster"]
vars:
dest:
sh: kubectl --context {{.cluster}} get pvc --all-namespaces --no-headers | grep "dst-dest" | awk '{print $1 "/" $2}'
cache:
sh: kubectl --context {{.cluster}} get pvc --all-namespaces --no-headers | grep "dst-cache" | awk '{print $1 "/" $2}'
snaps:
sh: kubectl --context {{.cluster}} get volumesnapshot --all-namespaces --no-headers | grep "dst-dest" | awk '{print $1 "/" $2}'
# Suspend the Flux ks and hr
.suspend:
internal: true
cmds:
- flux --context {{.cluster}} -n flux-system suspend kustomization {{.app}}
- flux --context {{.cluster}} -n {{.ns}} suspend helmrelease {{.app}}
- kubectl --context {{.cluster}} -n {{.ns}} scale {{.controller}} --replicas 0
- kubectl --context {{.cluster}} -n {{.ns}} wait pod --for delete --selector="app.kubernetes.io/name={{.app}}" --timeout=2m
env: *env-vars
# Wipe the PVC of all data
.wipe:
internal: true
cmds:
- /etc/profiles/per-user/jahanson/bin/envsubst < <(cat {{.VOLSYNC_RESOURCES_DIR}}/wipe.tmpl.yaml) | kubectl --context {{.cluster}} apply -f -
- bash {{.VOLSYNC_RESOURCES_DIR}}/wait-for-job.sh {{.job}} {{.ns}} {{.cluster}}
- kubectl --context {{.cluster}} -n {{.ns}} wait job/{{.job}} --for condition=complete --timeout=120m
- kubectl --context {{.cluster}} -n {{.ns}} logs job/{{.job}} --container main
- kubectl --context {{.cluster}} -n {{.ns}} delete job {{.job}}
env: *env-vars
- flux --namespace flux-system suspend kustomization {{.APP}}
- flux --namespace {{.NS}} suspend helmrelease {{.APP}}
- kubectl --namespace {{.NS}} scale {{.CONTROLLER}}/{{.APP}} --replicas 0
- kubectl --namespace {{.NS}} wait pod --for=delete --selector="app.kubernetes.io/name={{.APP}}" --timeout=5m
vars:
job: volsync-wipe-{{.app}}
preconditions:
- test -f /etc/profiles/per-user/jahanson/bin/envsubst
- test -f {{.VOLSYNC_RESOURCES_DIR}}/wipe.tmpl.yaml
- test -f {{.VOLSYNC_RESOURCES_DIR}}/wait-for-job.sh
NS: '{{.NS | default "default"}}'
APP: '{{.APP}}'
CONTROLLER:
sh: kubectl --namespace {{.NS}} get deployment {{.APP}} &>/dev/null && echo deployment || echo statefulset
# Create VolSync replicationdestination CR to restore data
.restore:
internal: true
cmds:
- /etc/profiles/per-user/jahanson/bin/envsubst < <(cat {{.VOLSYNC_RESOURCES_DIR}}/replicationdestination.tmpl.yaml) | kubectl --context {{.cluster}} apply -f -
- bash {{.VOLSYNC_RESOURCES_DIR}}/wait-for-job.sh {{.job}} {{.ns}} {{.cluster}}
- kubectl --context {{.cluster}} -n {{.ns}} wait job/{{.job}} --for condition=complete --timeout=120m
- kubectl --context {{.cluster}} -n {{.ns}} delete replicationdestination {{.job}}
env: *env-vars
- minijinja-cli --env --trim-blocks --lstrip-blocks --autoescape=none {{.VOLSYNC_RESOURCES_DIR}}/replicationdestination.yaml.j2 | kubectl apply --server-side --filename -
- until kubectl --namespace {{.NS}} get job/{{.JOB}} &>/dev/null; do sleep 5; done
- kubectl --namespace {{.NS}} wait job/{{.JOB}} --for=condition=complete --timeout=120m
- kubectl --namespace {{.NS}} delete replicationdestination {{.JOB}}
vars:
job: volsync-dst-{{.app}}
NS: '{{.NS | default "default"}}'
JOB: volsync-dst-{{.APP}}
PREVIOUS: '{{.PREVIOUS | default 2}}'
CLAIM:
sh: kubectl --namespace {{.NS}} get replicationsources/{{.APP}} --output=jsonpath="{.spec.sourcePVC}"
ACCESS_MODES:
sh: kubectl --namespace {{.NS}} get replicationsources/{{.APP}} --output=jsonpath="{.spec.restic.accessModes}"
STORAGE_CLASS_NAME:
sh: kubectl --namespace {{.NS}} get replicationsources/{{.APP}} --output=jsonpath="{.spec.restic.storageClassName}"
PUID:
sh: kubectl --namespace {{.NS}} get replicationsources/{{.APP}} --output=jsonpath="{.spec.restic.moverSecurityContext.runAsUser}"
PGID:
sh: kubectl --namespace {{.NS}} get replicationsources/{{.APP}} --output=jsonpath="{.spec.restic.moverSecurityContext.runAsGroup}"
env:
NS: '{{.NS}}'
JOB: '{{.JOB}}'
APP: '{{.APP}}'
PREVIOUS: '{{.PREVIOUS}}'
CLAIM: '{{.CLAIM}}'
ACCESS_MODES: '{{.ACCESS_MODES}}'
STORAGE_CLASS_NAME: '{{.STORAGE_CLASS_NAME}}'
PUID: '{{.PUID}}'
PGID: '{{.PGID}}'
preconditions:
- test -f /etc/profiles/per-user/jahanson/bin/envsubst
- test -f {{.VOLSYNC_RESOURCES_DIR}}/replicationdestination.tmpl.yaml
- test -f {{.VOLSYNC_RESOURCES_DIR}}/wait-for-job.sh
- test -f {{.VOLSYNC_RESOURCES_DIR}}/replicationdestination.yaml.j2
# Resume Flux ks and hr
.resume:
internal: true
cmds:
- flux --context {{.cluster}} -n {{.ns}} resume helmrelease {{.app}}
- flux --context {{.cluster}} -n flux-system resume kustomization {{.app}}
env: *env-vars
- flux --namespace {{.NS}} resume helmrelease {{.APP}}
- flux --namespace flux-system resume kustomization {{.APP}}
- kubectl --namespace {{.NS}} scale {{.CONTROLLER}}/{{.APP}} --replicas 1
- kubectl --namespace {{.NS}} wait pod --for=condition=ready --selector="app.kubernetes.io/name={{.APP}}" --timeout=5m
vars:
NS: '{{.NS | default "default"}}'
APP: '{{.APP}}'
CONTROLLER:
sh: kubectl --namespace {{.NS}} get deployment {{.APP}} &>/dev/null && echo deployment || echo statefulset
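
A quick usage sketch of the refactored tasks above (the application name and namespace are illustrative; CLUSTER defaults to main per the task summaries):

# suspend VolSync, take a manual snapshot, run a restore (PREVIOUS snapshots back, default 2), then resume
task volsync:state-suspend CLUSTER=main
task volsync:snapshot CLUSTER=main NS=default APP=radarr
task volsync:restore CLUSTER=main NS=default APP=radarr PREVIOUS=2
task volsync:state-resume CLUSTER=main
# clear stale Restic locks across all ReplicationSources
task volsync:unlock CLUSTER=main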

View file

@ -1 +0,0 @@
CLUSTER=theshire

View file

@ -1,20 +0,0 @@
---
apiVersion: batch/v1
kind: Job
metadata:
name: ${job}
namespace: ${ns}
spec:
ttlSecondsAfterFinished: 3600
template:
spec:
automountServiceAccountToken: false
restartPolicy: OnFailure
containers:
- name: main
image: docker.io/restic/restic:latest
args: ["snapshots"]
envFrom:
- secretRef:
name: ${app}-volsync-r2-secret
resources: {}

View file

@ -1,31 +0,0 @@
---
apiVersion: volsync.backube/v1alpha1
kind: ReplicationDestination
metadata:
name: ${job}
namespace: ${ns}
spec:
trigger:
manual: restore-once
restic:
repository: ${app}-volsync-r2-secret
destinationPVC: ${claim}
copyMethod: Direct
storageClassName: ceph-block
# storageClassName: ceph-filesystem
# accessModes: ["ReadWriteMany"]
# IMPORTANT NOTE:
# Set to the last X number of snapshots to restore from
previous: ${previous}
# OR;
# IMPORTANT NOTE:
# On bootstrap set `restoreAsOf` to the time the old cluster was destroyed.
# This will essentially prevent volsync from trying to restore a backup
# from a application that started with default data in the PVC.
# Do not restore snapshots made after the following RFC3339 Timestamp.
# date --rfc-3339=seconds (--utc)
# restoreAsOf: "2022-12-10T16:00:00-05:00"
moverSecurityContext:
runAsUser: ${puid}
runAsGroup: ${pgid}
fsGroup: ${pgid}

View file

@ -0,0 +1,23 @@
---
apiVersion: volsync.backube/v1alpha1
kind: ReplicationDestination
metadata:
name: {{ ENV.JOB }}
namespace: {{ ENV.NS }}
spec:
trigger:
manual: restore-once
restic:
repository: {{ ENV.APP }}-volsync-secret
destinationPVC: {{ ENV.CLAIM }}
copyMethod: Direct
storageClassName: {{ ENV.STORAGE_CLASS_NAME }}
accessModes: {{ ENV.ACCESS_MODES }}
previous: {{ ENV.PREVIOUS }}
enableFileDeletion: true
cleanupCachePVC: true
cleanupTempPVC: true
moverSecurityContext:
runAsUser: {{ ENV.PUID }}
runAsGroup: {{ ENV.PGID }}
fsGroup: {{ ENV.PGID }}
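
To preview what the .restore task would apply, the template above can be rendered by hand with the same flags and environment variables the task exports; the values below are illustrative placeholders, not taken from the repository:

export NS=default JOB=volsync-dst-radarr APP=radarr PREVIOUS=2 \
       CLAIM=radarr ACCESS_MODES='["ReadWriteOnce"]' STORAGE_CLASS_NAME=ceph-block \
       PUID=568 PGID=568
minijinja-cli --env --trim-blocks --lstrip-blocks --autoescape=none replicationdestination.yaml.j2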

View file

@ -1,27 +0,0 @@
---
apiVersion: batch/v1
kind: Job
metadata:
name: ${job}
namespace: ${ns}
spec:
ttlSecondsAfterFinished: 3600
template:
spec:
automountServiceAccountToken: false
restartPolicy: OnFailure
containers:
- name: minio
image: docker.io/restic/restic:latest
args: ["unlock", "--remove-all"]
envFrom:
- secretRef:
name: ${app}-volsync-secret
resources: {}
- name: r2
image: docker.io/restic/restic:latest
args: ["unlock", "--remove-all"]
envFrom:
- secretRef:
name: ${app}-volsync-r2-secret
resources: {}

View file

@ -1,14 +0,0 @@
#!/usr/bin/env bash
JOB=$1
NAMESPACE="${2:-default}"
CLUSTER="${3:-main}"
[[ -z "${JOB}" ]] && echo "Job name not specified" && exit 1
while true; do
STATUS="$(kubectl --context "${CLUSTER}" -n "${NAMESPACE}" get pod -l job-name="${JOB}" -o jsonpath='{.items[*].status.phase}')"
if [ "${STATUS}" == "Pending" ]; then
break
fi
sleep 1
done

View file

@ -1,22 +0,0 @@
#!/usr/bin/env bash
APP=$1
NAMESPACE="${2:-default}"
CLUSTER="${3:-theshire}"
is_deployment() {
kubectl --context "${CLUSTER}" -n "${NAMESPACE}" get deployment "${APP}" >/dev/null 2>&1
}
is_statefulset() {
kubectl --context "${CLUSTER}" -n "${NAMESPACE}" get statefulset "${APP}" >/dev/null 2>&1
}
if is_deployment; then
echo "deployment.apps/${APP}"
elif is_statefulset; then
echo "statefulset.apps/${APP}"
else
echo "No deployment or statefulset found for ${APP}"
exit 1
fi

View file

@ -1,26 +0,0 @@
---
apiVersion: batch/v1
kind: Job
metadata:
name: ${job}
namespace: ${ns}
spec:
ttlSecondsAfterFinished: 3600
template:
spec:
automountServiceAccountToken: false
restartPolicy: OnFailure
containers:
- name: main
image: docker.io/library/alpine:latest
command: ["/bin/sh", "-c", "cd /config; find . -delete"]
volumeMounts:
- name: config
mountPath: /config
securityContext:
privileged: true
resources: {}
volumes:
- name: config
persistentVolumeClaim:
claimName: ${claim}

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
@ -35,7 +35,7 @@ spec:
app:
image:
repository: docker.io/ollama/ollama
tag: 0.3.12
tag: 0.3.14
env:
- name: OLLAMA_HOST
value: 0.0.0.0

View file

@ -22,7 +22,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:

View file

@ -20,7 +20,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -0,0 +1,13 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/kustomization.json
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
# Pre Flux-Kustomizations
- ./namespace.yaml
# Flux-Kustomizations
# - ./jellyfin/ks.yaml # sqlite
# - ./jellyseerr/ks.yaml # sqlite
- ./radarr/ks.yaml # postgres
# - ./shoko/ks.yaml # sqlite
- ./sonarr/ks.yaml # postgres

View file

@ -0,0 +1,9 @@
---
apiVersion: v1
kind: Namespace
metadata:
name: anime
labels:
kustomize.toolkit.fluxcd.io/prune: disabled
volsync.backube/privileged-movers: "true"
pgo-enabled-hsn.dev: "true"

View file

@ -0,0 +1,46 @@
---
# yaml-language-server: $schema=https://ks.hsn.dev/external-secrets.io/externalsecret_v1beta1.json
apiVersion: external-secrets.io/v1beta1
kind: ExternalSecret
metadata:
name: radarr
spec:
secretStoreRef:
kind: ClusterSecretStore
name: onepassword-connect
target:
name: radarr-secret
template:
engineVersion: v2
data:
PUSHOVER_TOKEN: "{{ .radarr_token }}"
PUSHOVER_USER_KEY: "{{ .userkey_jahanson }}"
RADARR__AUTH__APIKEY: "{{ .api_key_anime }}"
dataFrom:
- extract:
key: pushover
- extract:
key: radarr
---
# yaml-language-server: $schema=https://ks.hsn.dev/external-secrets.io/externalsecret_v1beta1.json
apiVersion: external-secrets.io/v1beta1
kind: ExternalSecret
metadata:
name: radarr-db
spec:
secretStoreRef:
name: crunchy-pgo-secrets
kind: ClusterSecretStore
target:
name: radarr-db-secret
template:
engineVersion: v2
data:
RADARR__POSTGRES__HOST: "{{ index . \"pgbouncer-host\" }}"
RADARR__POSTGRES__USER: "{{ .user }}"
RADARR__POSTGRES__PASSWORD: "{{ .password }}"
RADARR__POSTGRES__PORT: "{{ .port }}"
RADARR__POSTGRES__MAINDB: "{{ .dbname }}"
dataFrom:
- extract:
key: postgres-pguser-radarr-anime

View file

@ -0,0 +1,119 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
name: &app radarr-anime
spec:
interval: 30m
chart:
spec:
chart: app-template
version: 3.5.1
sourceRef:
kind: HelmRepository
name: bjw-s
namespace: flux-system
install:
remediation:
retries: 3
upgrade:
cleanupOnFail: true
remediation:
retries: 3
strategy: rollback
values:
controllers:
radarr:
annotations:
reloader.stakater.com/auto: "true"
containers:
app:
image:
repository: ghcr.io/onedr0p/radarr-develop
tag: 5.13.0.9361
env:
RADARR__APP__INSTANCENAME: Radarr-Anime
RADARR__APP__THEME: dark
RADARR__AUTH__METHOD: External
RADARR__AUTH__REQUIRED: DisabledForLocalAddresses
RADARR__LOG__DBENABLED: "False"
RADARR__LOG__LEVEL: info
RADARR__SERVER__PORT: &port 80
RADARR__UPDATE__BRANCH: develop
TZ: America/Chicago
envFrom:
- secretRef:
name: radarr-secret
- secretRef:
name: radarr-db-secret
probes:
liveness: &probes
enabled: true
custom: true
spec:
httpGet:
path: /ping
port: *port
initialDelaySeconds: 0
periodSeconds: 10
timeoutSeconds: 1
failureThreshold: 3
readiness: *probes
startup:
enabled: false
securityContext:
allowPrivilegeEscalation: false
readOnlyRootFilesystem: true
capabilities: { drop: ["ALL"] }
resources:
requests:
cpu: 10m
limits:
memory: 6Gi
pod:
securityContext:
runAsUser: 568
runAsGroup: 568
runAsNonRoot: true
fsGroup: 568
fsGroupChangePolicy: OnRootMismatch
supplementalGroups: [10000]
service:
app:
controller: radarr
ports:
http:
port: *port
ingress:
app:
enabled: true
className: internal-nginx
hosts:
- host: &host "{{ .Release.Name }}.jahanson.tech"
paths:
- path: /
service:
identifier: app
port: http
tls:
- hosts:
- *host
persistence:
config:
enabled: true
existingClaim: *app
tmp:
type: emptyDir
media:
type: nfs
server: 10.1.1.13
path: /eru/media
globalMounts:
- path: /data/nas-media
moria-media:
type: nfs
server: 10.1.1.61
path: /moria/media/
globalMounts:
- path: /data/moria-media

View file

@ -0,0 +1,8 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/kustomization
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
- ./externalsecret.yaml
- ./helmrelease.yaml
- ../../../../templates/volsync

View file

@ -0,0 +1,29 @@
---
# yaml-language-server: $schema=https://ks.hsn.dev/kustomize.toolkit.fluxcd.io/kustomization_v1.json
apiVersion: kustomize.toolkit.fluxcd.io/v1
kind: Kustomization
metadata:
name: &app radarr-anime
namespace: flux-system
spec:
targetNamespace: anime
commonMetadata:
labels:
app.kubernetes.io/name: *app
dependsOn:
- name: crunchy-postgres-operator
- name: external-secrets-stores
- name: rook-ceph-cluster
- name: volsync
path: ./kubernetes/apps/anime/radarr/app
prune: true
sourceRef:
kind: GitRepository
name: theshire
wait: false
interval: 30m
timeout: 5m
postBuild:
substitute:
APP: *app
VOLSYNC_CAPACITY: 5Gi

View file

@ -0,0 +1,47 @@
---
# yaml-language-server: $schema=https://ks.hsn.dev/external-secrets.io/externalsecret_v1beta1.json
apiVersion: external-secrets.io/v1beta1
kind: ExternalSecret
metadata:
name: sonarr
spec:
refreshInterval: 1m
secretStoreRef:
kind: ClusterSecretStore
name: onepassword-connect
target:
name: sonarr-secret
template:
engineVersion: v2
data:
PUSHOVER_TOKEN: "{{ .sonarr_token }}"
PUSHOVER_USER_KEY: "{{ .userkey_jahanson }}"
SONARR__AUTH__APIKEY: "{{ .api_key_anime }}"
dataFrom:
- extract:
key: pushover
- extract:
key: sonarr
---
# yaml-language-server: $schema=https://ks.hsn.dev/external-secrets.io/externalsecret_v1beta1.json
apiVersion: external-secrets.io/v1beta1
kind: ExternalSecret
metadata:
name: sonarr-db
spec:
secretStoreRef:
name: crunchy-pgo-secrets
kind: ClusterSecretStore
target:
name: sonarr-db-secret
template:
engineVersion: v2
data:
SONARR__POSTGRES__HOST: "{{ index . \"pgbouncer-host\" }}"
SONARR__POSTGRES__USER: "{{ .user }}"
SONARR__POSTGRES__PASSWORD: "{{ .password }}"
SONARR__POSTGRES__PORT: "{{ .port }}"
SONARR__POSTGRES__MAINDB: "{{ .dbname }}"
dataFrom:
- extract:
key: postgres-pguser-sonarr-anime

View file

@ -0,0 +1,119 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
name: &app sonarr-anime
spec:
interval: 30m
chart:
spec:
chart: app-template
version: 3.5.1
sourceRef:
kind: HelmRepository
name: bjw-s
namespace: flux-system
install:
remediation:
retries: 3
upgrade:
cleanupOnFail: true
remediation:
retries: 3
strategy: rollback
values:
controllers:
sonarr:
annotations:
reloader.stakater.com/auto: "true"
containers:
app:
image:
repository: ghcr.io/onedr0p/sonarr-develop
tag: 4.0.9.2513
env:
SONARR__APP__INSTANCENAME: Sonarr-Anime
SONARR__APP__THEME: dark
SONARR__AUTH__METHOD: External
SONARR__AUTH__REQUIRED: DisabledForLocalAddresses
SONARR__LOG__DBENABLED: "False"
SONARR__LOG__LEVEL: info
SONARR__SERVER__PORT: &port 80
SONARR__UPDATE__BRANCH: develop
TZ: America/Chicago
envFrom:
- secretRef:
name: sonarr-secret
- secretRef:
name: sonarr-db-secret
probes:
liveness: &probes
enabled: true
custom: true
spec:
httpGet:
path: /ping
port: *port
initialDelaySeconds: 0
periodSeconds: 30
timeoutSeconds: 5
failureThreshold: 3
readiness: *probes
startup:
enabled: false
securityContext:
allowPrivilegeEscalation: false
readOnlyRootFilesystem: true
capabilities: { drop: ["ALL"] }
resources:
requests:
cpu: 10m
limits:
memory: 6Gi
pod:
securityContext:
runAsUser: 568
runAsGroup: 568
runAsNonRoot: true
fsGroup: 568
fsGroupChangePolicy: OnRootMismatch
supplementalGroups: [10000]
service:
app:
controller: sonarr
ports:
http:
port: *port
ingress:
main:
enabled: true
className: internal-nginx
hosts:
- host: &host "{{ .Release.Name }}.jahanson.tech"
paths:
- path: /
service:
identifier: app
port: http
tls:
- hosts:
- *host
persistence:
config:
enabled: true
existingClaim: *app
tmp:
type: emptyDir
media:
type: nfs
server: 10.1.1.13
path: /eru/media
globalMounts:
- path: /data/nas-media
moria-media:
type: nfs
server: 10.1.1.61
path: /moria/media/
globalMounts:
- path: /data/moria-media

View file

@ -0,0 +1,8 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/kustomization
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
- ./externalsecret.yaml
- ./helmrelease.yaml
- ../../../../templates/volsync

View file

@ -0,0 +1,29 @@
---
# yaml-language-server: $schema=https://ks.hsn.dev/kustomize.toolkit.fluxcd.io/kustomization_v1.json
apiVersion: kustomize.toolkit.fluxcd.io/v1
kind: Kustomization
metadata:
name: &app sonarr-anime
namespace: flux-system
spec:
targetNamespace: anime
commonMetadata:
labels:
app.kubernetes.io/name: *app
dependsOn:
- name: crunchy-postgres-operator
- name: external-secrets-stores
- name: volsync
- name: rook-ceph-cluster
path: ./kubernetes/apps/anime/sonarr/app
prune: true
sourceRef:
kind: GitRepository
name: theshire
wait: false
interval: 30m
timeout: 5m
postBuild:
substitute:
APP: *app
VOLSYNC_CAPACITY: 5Gi

View file

@ -3,9 +3,12 @@
apiVersion: kustomize.toolkit.fluxcd.io/v1
kind: Kustomization
metadata:
name: cert-manager
name: &app cert-manager
namespace: flux-system
spec:
commonMetadata:
labels:
app.kubernetes.io/name: *app
interval: 10m
path: "./kubernetes/apps/cert-manager/cert-manager/app"
prune: true
@ -18,9 +21,12 @@ spec:
apiVersion: kustomize.toolkit.fluxcd.io/v1
kind: Kustomization
metadata:
name: cert-manager-issuers
name: &app cert-manager-issuers
namespace: flux-system
spec:
commonMetadata:
labels:
app.kubernetes.io/name: *app
interval: 10m
path: "./kubernetes/apps/cert-manager/cert-manager/issuers"
prune: true

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:

View file

@ -3,9 +3,12 @@
apiVersion: kustomize.toolkit.fluxcd.io/v1
kind: Kustomization
metadata:
name: cert-manager-webhook-dnsimple
name: &app cert-manager-webhook-dnsimple
namespace: flux-system
spec:
commonMetadata:
labels:
app.kubernetes.io/name: *app
targetNamespace: cert-manager
interval: 10m
path: "./kubernetes/apps/cert-manager/webhook-dnsimple/app"

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:

View file

@ -20,5 +20,4 @@ spec:
name: theshire
wait: true
interval: 30m
retryInterval: 1m
timeout: 5m

View file

@ -5,6 +5,7 @@ kind: Kustomization
resources:
- ./externalsecret.yaml
- ./gatus.yaml
- ./podmonitor.yaml
- ./postgrescluster.yaml
- ./pushsecret.yaml
- ./service.yaml

View file

@ -0,0 +1,38 @@
---
# yaml-language-server: $schema=https://ks.hsn.dev/monitoring.coreos.com/podmonitor_v1.json
apiVersion: monitoring.coreos.com/v1
kind: PodMonitor
metadata:
name: cpgo-postgres
spec:
jobLabel: cpgo-postgres
namespaceSelector:
matchNames:
- database
podMetricsEndpoints:
- honorLabels: true
path: /metrics
port: exporter
relabelings:
- sourceLabels:
[
"__meta_kubernetes_namespace",
"__meta_kubernetes_pod_label_postgres_operator_crunchydata_com_cluster",
]
targetLabel: pg_cluster
separator: "/"
replacement: "$1$2"
- sourceLabels:
[
__meta_kubernetes_pod_label_postgres_operator_crunchydata_com_instance,
]
targetLabel: deployment
- sourceLabels:
[__meta_kubernetes_pod_label_postgres_operator_crunchydata_com_role]
targetLabel: role
- sourceLabels: [__meta_kubernetes_pod_name]
targetLabel: instance
selector:
matchLabels:
postgres-operator.crunchydata.com/cluster: postgres
postgres-operator.crunchydata.com/crunchy-postgres-exporter: "true"

View file

@ -23,7 +23,13 @@ spec:
pgmonitor:
exporter:
# https://github.com/CrunchyData/postgres-operator-examples/blob/main/helm/install/values.yaml
image: registry.developers.crunchydata.com/crunchydata/crunchy-postgres-exporter:ubi8-0.15.0-3
# image: registry.developers.crunchydata.com/crunchydata/crunchy-postgres-exporter:ubi8-0.15.0-12
resources:
requests:
cpu: 10m
memory: 64M
limits:
memory: 512M
patroni: # turn on sync writes to at least 1 other replica
dynamicConfiguration:
@ -39,7 +45,7 @@ spec:
metadata:
labels:
app.kubernetes.io/name: crunchy-postgres
replicas: &replica 1
replicas: &replica 2
dataVolumeClaimSpec:
storageClassName: openebs-hostpath
accessModes:
@ -117,12 +123,22 @@ spec:
- radarr_main
password:
type: AlphaNumeric
- name: radarr-anime
databases:
- radarr_anime
password:
type: AlphaNumeric
- name: sonarr
databases:
- sonarr_logs
- sonarr_main
password:
type: AlphaNumeric
- name: sonarr-anime
databases:
- sonarr_anime
password:
type: AlphaNumeric
- name: jellyseerr
databases:
- jellyseerr
@ -177,6 +193,12 @@ spec:
pgBouncer:
port: 5432
replicas: *replica
service:
type: LoadBalancer
metadata:
annotations:
external-dns.alpha.kubernetes.io/hostname: pgbouncer.jahanson.tech
io.cilium/lb-ipam-ips: 10.1.1.36
metadata:
labels:
app.kubernetes.io/name: crunchy-postgres-pgbouncer
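
With pgBouncer now published as a LoadBalancer (pgbouncer.jahanson.tech / 10.1.1.36, port 5432), the new anime databases are reachable from outside the cluster; a rough sketch, assuming the operator-generated pguser secret lives in the database namespace and stores the password under its usual key:

# pull the generated password for the radarr-anime user (namespace and secret name assumed)
kubectl --namespace database get secret postgres-pguser-radarr-anime --output jsonpath='{.data.password}' | base64 -d
# connect through pgBouncer via the hostname published by external-dns (prompts for that password)
psql "host=pgbouncer.jahanson.tech port=5432 dbname=radarr_anime user=radarr-anime"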

View file

@ -9,7 +9,7 @@ spec:
chart:
spec:
chart: pgo
version: 5.6.1
version: 5.7.0
sourceRef:
kind: HelmRepository
name: crunchydata

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/fluxcd-community/flux2-schemas/main/helmrepository-source-v1beta2.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/fluxcd-community/flux2-schemas/main/helmrepository-source-v1.json
apiVersion: source.toolkit.fluxcd.io/v1
kind: HelmRepository
metadata:

View file

@ -5,7 +5,7 @@ kind: Dragonfly
metadata:
name: dragonfly
spec:
image: ghcr.io/dragonflydb/dragonfly:v1.23.2
image: ghcr.io/dragonflydb/dragonfly:v1.24.0
replicas: 3
env:
- name: MAX_MEMORY

View file

@ -19,7 +19,6 @@ spec:
name: theshire
wait: true
interval: 30m
retryInterval: 1m
timeout: 5m
---
# yaml-language-server: $schema=https://ks.hsn.dev/kustomize.toolkit.fluxcd.io/kustomization_v1.json
@ -42,5 +41,4 @@ spec:
name: theshire
wait: true
interval: 30m
retryInterval: 1m
timeout: 5m

View file

@ -44,17 +44,17 @@ spec:
{
"user_id": "tasmota",
"password": "{{ .x_emqx_tasmota_password }}",
"is_superuser": true # Until I can figure out authorization in emqx
"is_superuser": true
},
{
"user_id": "zwave",
"password": "{{ .x_emqx_homeassistant_password }}",
"is_superuser": true # Until I can figure out authorization in emqx
"is_superuser": true
},
{
"user_id": "zwave",
"password": "{{ .x_emqx_zwave_password }}",
"is_superuser": true # Until I can figure out authorization in emqx
"is_superuser": true
}
]

View file

@ -9,7 +9,7 @@ spec:
chart:
spec:
chart: emqx-operator
version: 2.2.24
version: 2.2.25
sourceRef:
kind: HelmRepository
name: emqx
@ -26,6 +26,8 @@ spec:
- name: cert-manager
namespace: cert-manager
values:
podannotations:
reloader.stakater.com/auto: "true"
fullnameOverride: emqx-operator
image:
repository: ghcr.io/emqx/emqx-operator

View file

@ -5,7 +5,7 @@ kind: EMQX
metadata:
name: emqx
spec:
image: public.ecr.aws/emqx/emqx:5.8.0
image: public.ecr.aws/emqx/emqx:5.8.1
config:
mode: Merge
coreTemplate:

View file

@ -2,7 +2,7 @@ authentication {
backend = "built_in_database"
mechanism = "password_based"
password_hash_algorithm {
name = "bcrypt",
name = "bcrypt"
}
user_id_type = "username"
bootstrap_file = "/opt/init-user.json"
@ -11,11 +11,6 @@ authentication {
authorization {
sources = [
{
type = file
enable = true
path = "/opt/acl.conf"
},
{
type = built_in_database
enable = true

View file

@ -19,7 +19,6 @@ spec:
name: theshire
wait: true
interval: 30m
retryInterval: 1m
timeout: 5m
---
# yaml-language-server: $schema=https://ks.hsn.dev/kustomize.toolkit.fluxcd.io/kustomization_v1.json
@ -42,5 +41,4 @@ spec:
name: theshire
wait: true
interval: 30m
retryInterval: 1m
timeout: 5m

View file

@ -20,7 +20,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -31,7 +31,7 @@ spec:
app:
image:
repository: ghcr.io/autobrr/autobrr
tag: v1.47.1@sha256:5364237a98b12bb269384dd4f4feaf7583027591c26bc2e6aaac0baa4e142b34
tag: v1.48.0@sha256:0ae19e3beedf491396e450b024c23e9e24df4d692286c0442a81fa699493def0
env:
AUTOBRR__CHECK_FOR_UPDATES: "false"
AUTOBRR__HOST: 0.0.0.0

View file

@ -20,7 +20,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -30,7 +30,7 @@ spec:
app:
image:
repository: docker.io/excalidraw/excalidraw
tag: latest@sha256:df3ddf00d7977d7007ea33b5397239ff20955eeeeafd38735cb1604be799564a
tag: latest@sha256:687708a6c879b9120c82b61faf9ff242367395be1d5f90c385989405e51b5433
pullPolicy: IfNotPresent
probes:
liveness:

View file

@ -17,7 +17,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -36,7 +36,7 @@ spec:
app:
image:
repository: ghcr.io/onedr0p/home-assistant
tag: 2024.10.1@sha256:04614835418d2bdacd64685b516e58e7c5446f72485d446e7635282ba1a06c43
tag: 2024.10.3@sha256:59cb3b01ea4695c5df8f4cc1e4d01fa7e22090caa3fd3f000a96b6a5de909f91
env:
TZ: America/Chicago
envFrom:

View file

@ -19,7 +19,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -17,7 +17,6 @@ spec:
name: theshire
wait: false # no flux ks dependents
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -22,6 +22,7 @@ resources:
- ./recyclarr/ks.yaml
- ./redlib/ks.yaml
- ./sabnzbd/ks.yaml
- ./scrypted/ks.yaml
- ./searxng/ks.yaml
- ./sonarr/ks.yaml
- ./tautulli/ks.yaml

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:

View file

@ -3,7 +3,7 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
- ../../../../templates/gatus/internal
- ../../../../templates/volsync
- ./externalsecret.yaml
- ./helmrelease.yaml
- ../../../../templates/gatus/internal
- ../../../../templates/volsync

View file

@ -22,7 +22,6 @@ spec:
name: theshire
wait: false # no flux ks dependents
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:

View file

@ -5,4 +5,4 @@ kind: Kustomization
resources:
- ./helmrelease.yaml
- ../../../../templates/volsync
# - ../../../../templates/gatus/internal
- ../../../../templates/gatus/internal

View file

@ -19,7 +19,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:

View file

@ -20,7 +20,6 @@ spec:
- name: volsync
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -18,6 +18,11 @@ spec:
values:
defaultPodOptions:
automountServiceAccountToken: false
securityContext:
runAsUser: 1000
runAsGroup: 1000
fsGroup: 1000
fsGroupChangePolicy: "OnRootMismatch"
controllers:
backend:
@ -25,13 +30,6 @@ spec:
annotations:
secret.reloader.stakater.com/reload: piped-secret
pod:
securityContext:
runAsUser: 1000
runAsGroup: 1000
fsGroup: 1000
fsGroupChangePolicy: "OnRootMismatch"
containers:
app:
image:
@ -58,21 +56,13 @@ spec:
frontend:
strategy: RollingUpdate
pod:
securityContext:
runAsUser: 101
runAsGroup: 101
fsGroup: 101
fsGroupChangePolicy: "OnRootMismatch"
containers:
app:
image:
repository: ghcr.io/bjw-s-labs/piped-frontend
tag: latest@sha256:c4cb0cfbdf149cdb738fb9e41a5cc748a7ea53053f4c5e036b9f7578d9273328
tag: 2024.10.19@sha256:73dc140014c715b503b0b8b806fd2e6ed070eeef6b2556b28a4381c690ebfdc8
env:
HTTP_PORT: 8080
HTTP_WORKERS: 4
BACKEND_HOSTNAME: piped-api.hsn.dev
probes:
liveness:
@ -87,21 +77,19 @@ spec:
memory: 256Mi
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
ytproxy:
strategy: RollingUpdate
pod:
securityContext:
runAsUser: 1000
runAsGroup: 1000
fsGroup: 1000
fsGroupChangePolicy: "OnRootMismatch"
containers:
app:
image:
repository: 1337kavin/piped-proxy
tag: latest@sha256:9872edd2c47c9c33dfa44c334e4cef4e2c6ec91638eb2dcf6ca36b7b3037fd59
tag: latest@sha256:47cf993679d1bf6b1dbfd3282e143818f2f11106832605789d1e26df52ac7b41
command:
- /app/piped-proxy
probes:

View file
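
The piped hunks hoist the shared securityContext out of each controller's `pod:` block into `defaultPodOptions`, which the bjw-s app-template applies to every controller's pod spec; as I read the chart, a controller can still override it at the pod level, with controller-level options taking precedence. A minimal sketch of that layering (the frontend override below is hypothetical, shown only to illustrate precedence; the diff actually drops the frontend's 101 IDs entirely):

values:
  defaultPodOptions:
    securityContext:                 # applied to the backend, frontend and ytproxy pods
      runAsUser: 1000
      runAsGroup: 1000
      fsGroup: 1000
      fsGroupChangePolicy: "OnRootMismatch"
  controllers:
    frontend:
      pod:
        securityContext:             # hypothetical override; pod-level settings win over defaultPodOptions
          runAsUser: 101
          runAsGroup: 101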

@ -19,7 +19,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
@ -31,7 +31,7 @@ spec:
app:
image:
repository: ghcr.io/onedr0p/prowlarr-develop
tag: 1.25.1.4770@sha256:8b59eb7f9e5321b702bdacae3468b63d71720091ba3b0e9dfaca686a7705d2b8
tag: 1.25.4.4818@sha256:5a936e5c73ebedfc45f1fa2541e84862a6c0df75b8f5148a082119fb65d55e2b
env:
# Ref: https://github.com/Radarr/Radarr/issues/7030#issuecomment-1039689518
# Ref: https://github.com/dotnet/runtime/issues/9336

View file

@ -20,7 +20,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
@ -31,7 +31,7 @@ spec:
app:
image:
repository: ghcr.io/onedr0p/radarr-develop
tag: 5.12.0.9255
tag: 5.13.0.9361
env:
RADARR__APP__INSTANCENAME: Radarr
RADARR__APP__THEME: dark

View file

@ -22,7 +22,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -11,6 +11,12 @@ sonarr:
quality_definition:
type: series
include:
# Comment out any of the following includes to disable them
- template: sonarr-quality-definition-anime
- template: sonarr-v4-quality-profile-anime
- template: sonarr-v4-custom-formats-anime
quality_profiles:
- name: Web 1080p
reset_unmatched_scores:
@ -100,6 +106,22 @@ sonarr:
assign_scores_to:
- name: Web 1080p
- name: Web 720p
# Anime Score Overrides
- trash_ids:
- 026d5aadd1a6b4e550b134cb6c72b3ca # Uncensored
assign_scores_to:
- name: Remux-1080p - Anime
score: 0 # Adjust scoring as desired
- trash_ids:
- b2550eb333d27b75833e25b8c2557b38 # 10bit
assign_scores_to:
- name: Remux-1080p - Anime
score: 0 # Adjust scoring as desired
- trash_ids:
- 418f50b10f1907201b6cfdf881f467b7 # Anime Dual Audio
assign_scores_to:
- name: Remux-1080p - Anime
score: 0 # Adjust scoring as desired
radarr:
radarr:

View file

@ -20,7 +20,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -38,7 +38,7 @@ spec:
app:
image:
repository: quay.io/redlib/redlib
tag: latest@sha256:e61e2535518e0b574f92642612f33f6fbee1aa22b2ff36ee740e26a025bb0039
tag: latest@sha256:a3f1eca3ebfc043eea4bc8274b1e9fe86ec15d0efa13536bd519f36eb18f763d
env:
REDLIB_DEFAULT_SHOW_NSFW: on
REDLIB_DEFAULT_WIDE: on

View file

@ -19,7 +19,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
@ -31,7 +31,7 @@ spec:
app:
image:
repository: ghcr.io/onedr0p/sabnzbd
tag: 4.3.3@sha256:6614d759bbaa6884926c6aa75018339bd35cd1add0ff92c907087327dd470477
tag: 4.3.3@sha256:86c645db93affcbf01cc2bce2560082bfde791009e1506dba68269b9c50bc341
env:
TZ: America/Chicago
SABNZBD__PORT: &port 80

View file

@ -21,7 +21,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -0,0 +1,120 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
name: &app scrypted
spec:
interval: 30m
chart:
spec:
chart: app-template
version: 3.5.1
interval: 30m
sourceRef:
kind: HelmRepository
name: bjw-s
namespace: flux-system
values:
controllers:
scrypted:
annotations:
reloader.stakater.com/auto: "true"
pod:
nodeSelector:
google.feature.node.kubernetes.io/coral: "true"
nvidia.com/gpu.present: "true"
securityContext:
supplementalGroups:
- 568
containers:
app:
image:
repository: ghcr.io/koush/scrypted
tag: v0.121.0-jammy-nvidia
probes:
liveness:
enabled: true
readiness:
enabled: true
startup:
enabled: true
spec:
failureThreshold: 30
periodSeconds: 5
resources:
requests:
cpu: 136m
memory: 1024Mi
limits:
nvidia.com/gpu: 1
memory: 8192Mi
securityContext:
privileged: true
service:
app:
controller: *app
type: LoadBalancer
annotations:
io.cilium/lb-ipam-ips: 10.1.1.33
nameOverride: *app
ports:
http:
port: 11080
primary: true
rebroadcast1: # driveway
port: 39655
rebroadcast2: # sideyard
port: 46561
rebroadcast3: # doorbell
port: 44759
homekit: # homekit
port: 42010
homekit-bridge: # bridge
port: 33961
ingress:
app:
className: "internal-nginx"
annotations:
hosts:
- host: &host scrypted.jahanson.tech
paths:
- path: /
service:
identifier: app
port: http
tls:
- hosts:
- *host
persistence:
config:
existingClaim: scrypted
advancedMounts:
scrypted:
app:
- path: /server/volume
cache:
type: emptyDir
globalMounts:
- path: /.cache
cache-npm:
type: emptyDir
globalMounts:
- path: /.npm
dev-bus-usb:
type: hostPath
hostPath: /dev/bus/usb
hostPathType: Directory
sys-bus-usb:
type: hostPath
hostPath: /sys/bus/usb
hostPathType: Directory
recordings:
type: nfs
server: shadowfax.jahanson.tech
path: /nahar/scrypted
globalMounts:
- path: /recordings

View file
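
The scrypted Service pins its LoadBalancer address with the `io.cilium/lb-ipam-ips` annotation, which only takes effect if Cilium LB-IPAM has a pool covering that address. A hedged sketch of such a pool (the pool name and range are assumptions; the CRD and fields are Cilium's LB-IPAM API in the 1.16 line):

apiVersion: cilium.io/v2alpha1
kind: CiliumLoadBalancerIPPool
metadata:
  name: pool-main                    # hypothetical name
spec:
  blocks:
    - start: 10.1.1.1                # assumed range that contains 10.1.1.33
      stop: 10.1.1.254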

@ -0,0 +1,7 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/kustomization
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
- ./helmrelease.yaml
- ../../../../templates/volsync

View file

@ -0,0 +1,30 @@
---
# yaml-language-server: $schema=https://ks.hsn.dev/kustomize.toolkit.fluxcd.io/kustomization_v1.json
apiVersion: kustomize.toolkit.fluxcd.io/v1
kind: Kustomization
metadata:
name: &appname scrypted
namespace: flux-system
spec:
targetNamespace: default
commonMetadata:
labels:
app.kubernetes.io/name: *appname
interval: 30m
timeout: 5m
path: "./kubernetes/apps/default/scrypted/app"
prune: true
sourceRef:
kind: GitRepository
name: theshire
wait: false
dependsOn:
- name: rook-ceph-cluster
- name: volsync
- name: external-secrets-stores
postBuild:
substitute:
APP: *appname
APP_UID: "0"
APP_GID: "0"
VOLSYNC_CAPACITY: 5Gi

View file
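
The new scrypted Flux Kustomization depends on volsync and passes `APP`, `APP_UID`/`APP_GID`, and `VOLSYNC_CAPACITY` through postBuild substitution; the shared `templates/volsync` component referenced by the app's kustomization.yaml presumably expands these into the backup objects. The sketch below is a guess at that shape using VolSync's restic mover API, not the repo's actual template:

apiVersion: volsync.backube/v1alpha1
kind: ReplicationSource
metadata:
  name: "${APP}"
spec:
  sourcePVC: "${APP}"
  trigger:
    schedule: "0 * * * *"                   # assumed schedule
  restic:
    repository: "${APP}-volsync-secret"     # assumed Secret name
    copyMethod: Snapshot
    cacheCapacity: "${VOLSYNC_CAPACITY}"
    moverSecurityContext:
      runAsUser: ${APP_UID}
      runAsGroup: ${APP_GID}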

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:

View file

@ -20,7 +20,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:

View file

@ -22,7 +22,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
@ -31,7 +31,7 @@ spec:
app:
image:
repository: ghcr.io/tautulli/tautulli
tag: v2.14.5@sha256:6017b491d8e9100a97391b639fff5824ad36a315c69aae3c9ed78407994a626e
tag: v2.14.6@sha256:f54d2d3a78780c765cd7a10b882474909f50247b5d2d118badaa9c035421effd
env:
TZ: America/Chicago
command: ["/usr/local/bin/python", "Tautulli.py"]

View file

@ -20,7 +20,6 @@ spec:
- name: volsync
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:

View file

@ -19,5 +19,4 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m

View file

@ -36,7 +36,7 @@ spec:
app:
image:
repository: ghcr.io/zwave-js/zwave-js-ui
tag: 9.21.1@sha256:a28eaf01060dbe2fa30045d6b2ac6a31bc34efbebb7aa7d19787929929aea16a
tag: 9.24.0@sha256:ed648be6b058c6aa74abca1868c3ac48cb82b06b22ef0ef4f7ba66dd9d331bfc
env:
TZ: America/Chicago
PORT: &port 80

View file

@ -17,7 +17,6 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m
postBuild:
substitute:

View file

@ -3,11 +3,14 @@
apiVersion: kustomize.toolkit.fluxcd.io/v1
kind: Kustomization
metadata:
name: flux-webhooks
name: &app flux-webhooks
namespace: flux-system
labels:
substitution.flux.home.arpa/enabled: "true"
spec:
commonMetadata:
labels:
app.kubernetes.io/name: *app
interval: 10m
path: ./kubernetes/apps/flux-system/add-ons/webhooks
prune: true
@ -20,11 +23,14 @@ spec:
apiVersion: kustomize.toolkit.fluxcd.io/v1
kind: Kustomization
metadata:
name: flux-monitoring
name: &app flux-monitoring
namespace: flux-system
labels:
substitution.flux.home.arpa/enabled: "true"
spec:
commonMetadata:
labels:
app.kubernetes.io/name: *app
interval: 10m
path: ./kubernetes/apps/flux-system/add-ons/monitoring
prune: true

View file

@ -0,0 +1,56 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
name: &app chronyd
spec:
chart:
spec:
chart: app-template
version: 3.5.1
interval: 30m
sourceRef:
kind: HelmRepository
name: bjw-s
namespace: flux-system
interval: 30m
values:
controllers:
chronyd:
type: daemonset
strategy: RollingUpdate
annotations:
reloader.stakater.com/auto: "true"
pod:
tolerations:
- key: node-role.kubernetes.io/master
effect: NoSchedule
containers:
app:
image:
repository: docker.io/library/rockylinux
tag: 9
args:
- "/bin/bash"
- "-c"
- "dnf install -y chrony iputils dnsutils && chronyd -n -d"
resources:
requests:
cpu: 23m
memory: 50M
securityContext:
privileged: true
persistence:
config:
type: configMap
name: chronyd-configmap
globalMounts:
- path: /etc/chrony.conf
subPath: chrony.conf
readOnly: true
data:
type: emptyDir
globalMounts:
- path: /var/lib/chrony

View file
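
The chronyd DaemonSet installs chrony at container start and then runs it in the foreground, so the only health signal is the process itself. If a probe is wanted, one low-effort option is an exec check with `chronyc tracking`, which should fail while the package is still installing or the daemon is unresponsive. A hedged sketch in the app-template probe syntax (the `custom`/`spec` fields are my recollection of the chart schema, and the delays are guesses):

containers:
  app:
    probes:
      liveness:
        enabled: true
        custom: true
        spec:
          exec:
            command: ["chronyc", "tracking"]   # non-zero exit if chronyd is not responding
          initialDelaySeconds: 60              # allow the dnf install step to finish
          periodSeconds: 60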

@ -0,0 +1,12 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/kustomization
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
- ./helmrelease.yaml
configMapGenerator:
- name: chronyd-configmap
files:
- chrony.conf=./resources/chrony.conf
generatorOptions:
disableNameSuffixHash: true

View file

@ -0,0 +1,27 @@
# Cloudflare time servers
server time.cloudflare.com iburst
# Record the rate at which the system clock gains/loses time.
driftfile /var/lib/chrony/drift
# Allow the system clock to be stepped in the first three updates
# if its offset is larger than 1 second.
makestep 1.0 3
# Enable kernel synchronization of the real-time clock (RTC).
rtcsync
# Specify file containing keys for NTP authentication.
keyfile /etc/chrony.keys
# Save NTS keys and cookies.
ntsdumpdir /var/lib/chrony
# Insert/delete leap seconds by slewing instead of stepping.
#leapsecmode slew
# Get TAI-UTC offset and leap seconds from the system tz database.
leapsectz right/UTC
# Specify directory for log files.
logdir /var/log/chrony

View file

@ -0,0 +1,20 @@
---
# yaml-language-server: $schema=https://ks.hsn.dev/kustomize.toolkit.fluxcd.io/kustomization_v1.json
apiVersion: kustomize.toolkit.fluxcd.io/v1
kind: Kustomization
metadata:
name: &app chronyd
namespace: flux-system
spec:
targetNamespace: kube-system
commonMetadata:
labels:
app.kubernetes.io/name: *app
path: ./kubernetes/apps/kube-system/chronyd/app
prune: true
sourceRef:
kind: GitRepository
name: theshire
wait: false
interval: 30m
timeout: 5m

View file

@ -10,7 +10,7 @@ spec:
chart:
spec:
chart: cilium
version: 1.16.2
version: 1.16.3
sourceRef:
kind: HelmRepository
name: cilium

View file

@ -7,7 +7,8 @@ metadata:
spec:
loadBalancerIPs: true
# interfaces: ["^enp.*|^eth.*|^ens.*|^eno.*"]
interfaces: ["^eno+|^enp+"]
interfaces: ["^eno+|^enp+|^bond+"]
# interfaces: ["^bond+"]
nodeSelector:
matchLabels:
kubernetes.io/os: linux

View file
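
One note on the L2 announcement policy's interface regex: `+` binds only to the preceding character, so `^eno+` literally means "en followed by one or more o". It still appears to match `eno1` because the pattern is not anchored at the end, but a more explicit form avoids the ambiguity. A hedged alternative sketch (clearer patterns, not what the repo uses):

spec:
  interfaces:
    - "^eno[0-9]+"
    - "^enp.+"
    - "^bond[0-9]+"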

@ -17,7 +17,6 @@ spec:
name: theshire
wait: true
interval: 30m
retryInterval: 1m
timeout: 5m
---
# yaml-language-server: $schema=https://ks.hsn.dev/kustomize.toolkit.fluxcd.io/kustomization_v1.json
@ -40,5 +39,4 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m

View file

@ -17,5 +17,4 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m

View file

@ -0,0 +1,64 @@
---
# yaml-language-server: $schema=https://ks.hsn.dev/helm.toolkit.fluxcd.io/helmrelease_v2.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
name: descheduler
spec:
interval: 30m
chart:
spec:
chart: descheduler
version: 0.31.0
sourceRef:
kind: HelmRepository
name: descheduler
namespace: flux-system
install:
remediation:
retries: 3
upgrade:
cleanupOnFail: true
remediation:
strategy: rollback
retries: 3
values:
replicas: 2
kind: Deployment
deschedulerPolicyAPIVersion: descheduler/v1alpha2
deschedulerPolicy:
profiles:
- name: Default
pluginConfig:
- name: DefaultEvictor
args:
evictFailedBarePods: true
evictLocalStoragePods: true
evictSystemCriticalPods: true
nodeFit: true
- name: RemovePodsViolatingInterPodAntiAffinity
- name: RemovePodsViolatingNodeAffinity
args:
nodeAffinityType:
- requiredDuringSchedulingIgnoredDuringExecution
- name: RemovePodsViolatingNodeTaints
- name: RemovePodsViolatingTopologySpreadConstraint
args:
constraints:
- DoNotSchedule
- ScheduleAnyway
plugins:
balance:
enabled:
- RemovePodsViolatingTopologySpreadConstraint
deschedule:
enabled:
- RemovePodsViolatingInterPodAntiAffinity
- RemovePodsViolatingNodeAffinity
- RemovePodsViolatingNodeTaints
service:
enabled: true
serviceMonitor:
enabled: true
leaderElection:
enabled: true

View file
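
The RemovePodsViolatingNodeAffinity plugin is scoped here to `requiredDuringSchedulingIgnoredDuringExecution`, i.e. a pod that was scheduled while a node label matched, but whose node no longer satisfies the rule, gets evicted so it can be rescheduled onto a node that does. A small illustrative sketch of a workload that would be picked up (name, label, and image are hypothetical):

apiVersion: apps/v1
kind: Deployment
metadata:
  name: gpu-worker                              # hypothetical workload
spec:
  replicas: 1
  selector:
    matchLabels:
      app: gpu-worker
  template:
    metadata:
      labels:
        app: gpu-worker
    spec:
      affinity:
        nodeAffinity:
          requiredDuringSchedulingIgnoredDuringExecution:
            nodeSelectorTerms:
              - matchExpressions:
                  - key: nvidia.com/gpu.present   # if this label is later removed from the node,
                    operator: In                   # the descheduler evicts the pod so the scheduler
                    values: ["true"]               # can place it on a node that still matches
      containers:
        - name: app
          image: registry.example.com/app:latest  # placeholder image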

@ -0,0 +1,6 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/kustomization
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
- ./helmrelease.yaml

View file

@ -0,0 +1,20 @@
---
# yaml-language-server: $schema=https://kubernetes-schemas.pages.dev/kustomize.toolkit.fluxcd.io/kustomization_v1.json
apiVersion: kustomize.toolkit.fluxcd.io/v1
kind: Kustomization
metadata:
name: &app descheduler
namespace: flux-system
spec:
targetNamespace: kube-system
commonMetadata:
labels:
app.kubernetes.io/name: *app
path: ./kubernetes/apps/kube-system/descheduler/app
prune: true
sourceRef:
kind: GitRepository
name: theshire
wait: false
interval: 30m
timeout: 5m

View file

@ -1,5 +1,5 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2beta2.schema.json
# yaml-language-server: $schema=https://raw.githubusercontent.com/bjw-s/helm-charts/main/charts/other/app-template/schemas/helmrelease-helm-v2.schema.json
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:

View file

@ -17,5 +17,4 @@ spec:
name: theshire
wait: false
interval: 30m
retryInterval: 1m
timeout: 5m

Some files were not shown because too many files have changed in this diff.