Compare commits

..

27 commits

Author SHA1 Message Date
b078847320 Update Rook Ceph group to v1.13.0 2023-12-20 18:03:09 +00:00
b8cb7d6299 Merge pull request 'Update Rook Ceph group to v1.12.10 (patch)' (#166) from renovate/patch-rook-ceph into main
Reviewed-on: #166
2023-12-20 17:40:23 +00:00
405a3992f7 Merge pull request 'Update Helm release external-secrets to v0.9.10' (#169) from renovate/external-secrets-0.x into main
Reviewed-on: #169
2023-12-20 17:29:10 +00:00
5c02f78674 Merge pull request 'Update quay.io/prometheus/alertmanager:main Docker digest to 9346cb8' (#170) from renovate/quay.io-prometheus-alertmanager-main into main
Reviewed-on: #170
2023-12-20 17:27:28 +00:00
021ccdf333 Merge pull request 'Update Thanos group' (#157) from renovate/thanos into main
Reviewed-on: #157
2023-12-20 17:24:47 +00:00
badfea9e4a v1.0.56 was pulled. 2023-12-20 17:21:39 +00:00
cd089d36ea Merge pull request 'Update Helm release reloader to v1.0.56' (#164) from renovate/reloader-1.x into main
Reviewed-on: #164
2023-12-20 17:18:49 +00:00
7acd219500 Merge pull request 'Update kube-prometheus-stack Docker tag to v55.5.0' (#165) from renovate/kube-prometheus-stack-55.x into main
Reviewed-on: #165
2023-12-20 17:14:17 +00:00
d2a696190a Merge pull request 'Update Flux group to v2.2.2 (patch)' (#171) from renovate/patch-flux into main
Reviewed-on: #171
2023-12-20 17:13:30 +00:00
e70abd0924 Update kube-prometheus-stack Docker tag to v55.5.0 2023-12-20 17:01:25 +00:00
9fc66aeea9 Update Flux group to v2.2.2 2023-12-20 17:00:52 +00:00
8070ee5cab
update storage class 2023-12-20 10:33:20 -06:00
34266c8e8d
Updating flux. 2023-12-20 10:31:11 -06:00
0f016c8d0f
Updating storage classes from local to ceph-block. 2023-12-20 09:56:23 -06:00
7b664c146e
replicated over erasure coded, enabling ceph snapshot 2023-12-20 09:24:56 -06:00
0498be4f61
Benchmark PVC. 2023-12-20 08:48:25 -06:00
be962f4bab Add wipe-rook tool. 2023-12-20 07:52:11 -06:00
65ca38b9ec Update Helm release reloader to v1.0.56 2023-12-20 10:01:08 +00:00
7c30c7afb3
Updating to fastlane. 2023-12-19 23:07:54 -06:00
41cb6d3539
Cluster upgrade to 1.6.0 2023-12-19 23:02:45 -06:00
d700b235f8
Add nessa. 2023-12-19 22:00:31 -06:00
9aca885667
Update IPs 2023-12-19 21:57:42 -06:00
3bf7b9b768
Add IP 2023-12-19 21:37:00 -06:00
5bfb56d028 Update Thanos group 2023-12-19 21:01:17 +00:00
9c9b6a06c3 Update quay.io/prometheus/alertmanager:main Docker digest to 9346cb8 2023-12-18 19:00:37 +00:00
e6be27fee4 Update Helm release external-secrets to v0.9.10 2023-12-16 18:00:47 +00:00
334ee918ca Update Rook Ceph group to v1.12.10 2023-12-13 21:01:11 +00:00
16 changed files with 243 additions and 49 deletions

View file

@@ -11,7 +11,7 @@ spec:
repository: "${rsrc}-restic-secret"
destinationPVC: "${claim}"
copyMethod: Direct
storageClassName: local-hostpath
storageClassName: ceph-block
# IMPORTANT NOTE:
# Set to the last X number of snapshots to restore from
previous: ${previous}

View file

@@ -83,7 +83,7 @@ spec:
- name: config
accessMode: ReadWriteOnce
size: 50Gi
storageClass: local-hostpath
storageClass: ceph-block
globalMounts:
- path: /config
service:

View file

@@ -12,4 +12,4 @@ spec:
requests:
storage: 20Gi
storageClassName: local-hostpath
storageClassName: ceph-block

View file

@@ -27,7 +27,7 @@ spec:
main:
image:
repository: quay.io/prometheus/alertmanager
tag: main@sha256:cf3b474d32e1f66fd2d80750bf35529aa4b49dad724857f4c481ab9a53befd94
tag: main@sha256:9346cb845868c70d37c89f7d0ff66debb3bce166410ff5251281cf03a8c54d84
pullPolicy: IfNotPresent
podAnnotations:
reloader.stakater.com/auto: "true"

View file

@@ -11,7 +11,7 @@ spec:
chart:
spec:
chart: kube-prometheus-stack
version: 55.3.1
version: 55.5.0
sourceRef:
kind: HelmRepository
name: prometheus-community
@@ -115,7 +115,7 @@ spec:
storage:
volumeClaimTemplate:
spec:
storageClassName: local-hostpath
storageClassName: ceph-block
resources:
requests:
storage: 1Gi
@@ -193,7 +193,7 @@ spec:
enableAdminAPI: true
walCompression: true
thanos:
image: quay.io/thanos/thanos:v0.32.5
image: quay.io/thanos/thanos:v0.33.0
objectStorageConfig:
name: thanos-s3-secret
key: objstore.yml
@@ -202,7 +202,7 @@ spec:
storageSpec:
volumeClaimTemplate:
spec:
storageClassName: local-hostpath
storageClassName: ceph-block
resources:
requests:
storage: 20Gi

View file

@@ -11,7 +11,7 @@ spec:
chart:
spec:
chart: thanos
version: 12.16.1
version: 12.20.0
sourceRef:
kind: HelmRepository
name: bitnami
@@ -31,7 +31,7 @@ spec:
image:
registry: quay.io
repository: thanos/thanos
tag: v0.32.5
tag: v0.33.0
existingObjstoreSecret: thanos-s3-secret
queryFrontend:
enabled: true
@@ -66,14 +66,14 @@ spec:
retentionResolution1h: 90d
persistence:
enabled: true
storageClass: local-hostpath
storageClass: ceph-block
size: 20Gi
storegateway:
enabled: true
replicaCount: 3
persistence:
enabled: true
storageClass: local-hostpath
storageClass: ceph-block
size: 10Gi
ruler:
enabled: true
@@ -94,7 +94,7 @@ spec:
severity: critical
persistence:
enabled: true
storageClass: local-hostpath
storageClass: ceph-block
size: 5Gi
metrics:
enabled: true

View file

@@ -58,11 +58,12 @@ spec:
- name: "nienna"
devices:
- name: /dev/disk/by-id/nvme-SAMSUNG_MZVLB1T0HALR-00000_S3W6NA0M610693
- name: /dev/disk/by-id/ata-ST16000NM001J-2TW113_ZR5E7NQR
- name: "orome"
devices:
- name: /dev/disk/by-id/nvme-SAMSUNG_MZVLB1T0HBLR-00000_S4GJNX0R613503
- name: /dev/disk/by-id/ata-ST16000NM001J-2TW113_ZR6021Z3
- name: "nessa"
devices:
- name: /dev/disk/by-id/nvme-SAMSUNG_MZVL21T0HCLR-00B00_S676NU0W641201
resources:
mgr:
limits:
@@ -72,16 +72,18 @@ spec:
cpu: "1000m"
memory: "4Gi"
cephBlockPoolsVolumeSnapshotClass:
enabled: false
cephFileSystemVolumeSnapshotClass:
enabled: true
name: csi-ceph-filesystem
isDefault: false
deletionPolicy: Delete
cephBlockPools:
- name: ceph-blockpool
spec:
failureDomain: host
erasureCoded:
dataChunks: 2
codingChunks: 1
replicated:
size: 3
storageClass:
enabled: true
name: ceph-block

View file

@@ -10,7 +10,7 @@ spec:
chart:
spec:
chart: external-secrets
version: 0.9.9
version: 0.9.10
interval: 30m
sourceRef:
kind: HelmRepository

View file

@@ -10,7 +10,7 @@ spec:
chart:
spec:
chart: reloader
version: 1.0.52
version: 1.0.55
sourceRef:
kind: HelmRepository
name: stakater

View file

@@ -3,7 +3,7 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
- github.com/fluxcd/flux2/manifests/install?ref=v2.1.2
- github.com/fluxcd/flux2/manifests/install?ref=v2.2.2
patches:
- patch: |-
$patch: delete

View file

@@ -9,7 +9,7 @@ spec:
interval: 10m
url: oci://ghcr.io/fluxcd/flux-manifests
ref:
tag: v2.1.2@sha256:5502bbd944688e3a6e1804521be7bcfcb66cf72d130196fe2736e00c6016525f
tag: v2.2.2
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/fluxcd-community/flux2-schemas/main/kustomization-kustomize-v1.json
apiVersion: kustomize.toolkit.fluxcd.io/v1

View file

@@ -0,0 +1,48 @@
---
kind: PersistentVolumeClaim
apiVersion: v1
metadata:
name: kbench-pvc
spec:
storageClassName: ceph-block
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 33Gi
---
apiVersion: batch/v1
kind: Job
metadata:
name: kbench
spec:
template:
metadata:
labels:
kbench: fio
spec:
containers:
- name: kbench
image: yasker/kbench:latest
imagePullPolicy: Always
env:
# - name: QUICK_MODE # for debugging
# value: "1"
- name: FILE_NAME
value: "/volume/test"
- name: SIZE
value: "30G" # must be 10% smaller than the PVC size due to filesystem also took space
- name: CPU_IDLE_PROF
value: "disabled" # must be "enabled" or "disabled"
volumeMounts:
- name: vol
mountPath: /volume/
# volumeDevices:
# - name: vol
# devicePath: /volume/test
restartPolicy: Never
volumes:
- name: vol
persistentVolumeClaim:
claimName: kbench-pvc
backoffLimit: 0

View file

@@ -0,0 +1,108 @@
---
apiVersion: v1
kind: Pod
metadata:
name: disk-wipe-nessa
spec:
restartPolicy: Never
nodeName: nessa
containers:
- name: disk-wipe
image: ghcr.io/onedr0p/alpine:3.18.4@sha256:b0b6f6f42bf9649ccaf0e98cd74d5e123471e2c4a4db4a5ee417b18dde9973a9
securityContext:
privileged: true
resources: {}
env:
- name: CEPH_DISK
value: "/dev/nvme0n1"
command:
[
"/bin/sh",
"-c"
]
args:
- apk add --no-cache sgdisk util-linux parted;
sgdisk --zap-all $CEPH_DISK;
blkdiscard $CEPH_DISK;
dd if=/dev/zero bs=1M count=1000 oflag=direct of=$CEPH_DISK;
partprobe $CEPH_DISK;
volumeMounts:
- mountPath: /mnt/host_var
name: host-var
volumes:
- name: host-var
hostPath:
path: /var
---
apiVersion: v1
kind: Pod
metadata:
name: disk-wipe-nienna
spec:
restartPolicy: Never
nodeName: nienna
containers:
- name: disk-wipe
image: ghcr.io/onedr0p/alpine:3.18.4@sha256:b0b6f6f42bf9649ccaf0e98cd74d5e123471e2c4a4db4a5ee417b18dde9973a9
securityContext:
privileged: true
resources: {}
env:
- name: CEPH_DISK
value: "/dev/nvme0n1"
command:
[
"/bin/sh",
"-c"
]
args:
- apk add --no-cache sgdisk util-linux parted;
sgdisk --zap-all $CEPH_DISK;
blkdiscard $CEPH_DISK;
dd if=/dev/zero bs=1M count=1000 oflag=direct of=$CEPH_DISK;
partprobe $CEPH_DISK;
volumeMounts:
- mountPath: /mnt/host_var
name: host-var
volumes:
- name: host-var
hostPath:
path: /var
---
apiVersion: v1
kind: Pod
metadata:
name: disk-wipe-orome
spec:
restartPolicy: Never
nodeName: orome
containers:
- name: disk-wipe
image: ghcr.io/onedr0p/alpine:3.18.4@sha256:b0b6f6f42bf9649ccaf0e98cd74d5e123471e2c4a4db4a5ee417b18dde9973a9
securityContext:
privileged: true
resources: {}
env:
- name: CEPH_DISK
value: "/dev/nvme0n1"
command:
[
"/bin/sh",
"-c"
]
args:
- apk add --no-cache sgdisk util-linux parted;
sgdisk --zap-all $CEPH_DISK;
blkdiscard $CEPH_DISK;
dd if=/dev/zero bs=1M count=1000 oflag=direct of=$CEPH_DISK;
partprobe $CEPH_DISK;
volumeMounts:
- mountPath: /mnt/host_var
name: host-var
volumes:
- name: host-var
hostPath:
path: /var

View file

@@ -4,3 +4,4 @@ valinor-arlen.yaml
valinor-eonwe.yaml
valinor-nienna.yaml
valinor-orome.yaml
valinor-nessa.yaml

View file

@@ -1,7 +1,7 @@
---
clusterName: valinor
talosVersion: v1.5.5
talosVersion: v1.6.0
kubernetesVersion: 1.28.4
endpoint: "https://${clusterEndpointIP}:6443"
@@ -14,6 +14,13 @@ additionalApiServerCertSans:
additionalMachineCertSans:
- ${clusterEndpointIP}
imageFactory:
registryURL: factory.talos.dev
schematicEndpoint: /schematics
protocol: https
installerURLTmpl: "{{.RegistryURL}}/installer/{{.ID}}:{{.Version}}"
ISOURLTmpl: "{{.Protocol}}://{{.RegistryURL}}/image/{{.ID}}/{{.Version}}/{{.Mode}}-{{.Arch}}.iso"
nodes:
# cloud CAX21 Arm64
- hostname: arlen
@@ -63,22 +70,19 @@ nodes:
- network: 10.2.0.0/16
gateway: 10.2.0.1 # The route's gateway (if empty, creates link scope route).
metric: 2048
# Bare-metal AX41-Nvme - AMD Ryzen 5 3600 6-Core Processor (Zen2) - 64GB ECC RAM
# VM AX41-Nvme - AMD Ryzen 5 3600 6-Core Processor (Zen2) - 64GB ECC RAM
- hostname: nienna
disableSearchDomain: true
ipAddress: ${niennaIP}
controlPlane: false
# customization:
# extraKernelArgs:
# - net.ifnames=0
# systemExtensions:
# officialExtensions:
# - siderolabs/amd-ucode
# - siderolabs/qemu-guest-agent
talosImageURL: harbor.hsn.dev/factory.talos.dev/installer/696bb48d9c48e567596f393a4ff9bfd26d4dda5d92c16beb580e96fa68d6324c
# https://factory.talos.dev/image/696bb48d9c48e567596f393a4ff9bfd26d4dda5d92c16beb580e96fa68d6324c/v1.5.5/metal-amd64.iso
# no guest agent in the raw.xz image
# https://factory.talos.dev/image/6c789e7a3eec37617fd9d239a7f696ba48e75bc4780f5cb30bf8882686d79a22/v1.5.5/metal-amd64.raw.xz
schematic:
customization:
extraKernelArgs:
- net.ifnames=0
systemExtensions:
officialExtensions:
- siderolabs/amd-ucode
- siderolabs/qemu-guest-agent
installDiskSelector:
busPath: /pci0000:00/0000:00:0a.0/virtio2/
networkInterfaces:
@@ -96,21 +100,50 @@ nodes:
- network: 10.2.0.0/16
gateway: 10.2.1.1 # The route's gateway (if empty, creates link scope route).
metric: 2048
# VM AX41-Nvme - AMD Ryzen 5 3600 6-Core Processor (Zen2) - 64GB ECC RAM
- hostname: nessa
disableSearchDomain: true
ipAddress: ${nessaIP}
controlPlane: false
schematic:
customization:
extraKernelArgs:
- net.ifnames=0
systemExtensions:
officialExtensions:
- siderolabs/amd-ucode
- siderolabs/qemu-guest-agent
installDiskSelector:
busPath: /pci0000:00/0000:00:0a.0/virtio2/
networkInterfaces:
- interface: eth0
dhcp: true
- interface: eth1
dhcp: false
vlans:
- vlanId: 4010
mtu: 1400
addresses:
- 10.2.1.4/24
dhcp: false
routes:
- network: 10.2.0.0/16
gateway: 10.2.1.1 # The route's gateway (if empty, creates link scope route).
metric: 2048
# VM on EX44 - Intel Gen 13 (Raptor Lake) - 64GB RAM
- hostname: orome
disableSearchDomain: true
ipAddress: ${oromeIP}
controlPlane: false
# customization:
# extraKernelArgs:
# - net.ifnames=0
# systemExtensions:
# officialExtensions:
# - siderolabs/i915-ucode
# - siderolabs/intel-ucode
# - siderolabs/qemu-guest-agent
talosImageURL: harbor.hsn.dev/factory.talos.dev/installer/f2f665587318c2d79e7b315cc333fff276ed59c8de831f16e28b4db107496ac2
# https://factory.talos.dev/image/f2f665587318c2d79e7b315cc333fff276ed59c8de831f16e28b4db107496ac2/metal-amd64.iso
schematic:
customization:
extraKernelArgs:
- net.ifnames=0
systemExtensions:
officialExtensions:
- siderolabs/i915-ucode
- siderolabs/intel-ucode
- siderolabs/qemu-guest-agent
installDiskSelector:
busPath: /pci0000:00/0000:00:0a.0/virtio2/
networkInterfaces:

View file

@@ -5,6 +5,7 @@ arlenIP: ENC[AES256_GCM,data:uXEM6zEuo40=,iv:eZMNksxYqpfYaY70yiJDOOnpOZ2cIfu4sE7
eonweIP: ENC[AES256_GCM,data:zfIK5G67zEQ=,iv:xXPae345ybW9u6SX5eNHwEcBe+Y/7Gvzt6qWni3x+k4=,tag:hFO15lqDviJz+dnsa8IgMg==,type:str]
niennaIP: ENC[AES256_GCM,data:3FRJBHRujl0=,iv:wd+Wp8DCXITYv4/Ys26+2GmeMXn0hvakxMUpDALqciE=,tag:P0Px35bWU0IzpH2H0i6dpA==,type:str]
oromeIP: ENC[AES256_GCM,data:xSp35+pBlyk=,iv:Utk+kCiUKbSrx3kCsEtc90VRWEC9FSZJvJ1fvLZWc38=,tag:6uHW+BiOau9PUS2I2OnVGA==,type:str]
nessaIP: ENC[AES256_GCM,data:iHaVLhItz0c=,iv:QBFH5xorX+WwPrSTQf0ZBBpJ9hg5itFkAnRlR4/vlm8=,tag:3i6iTRtXYMZ1uoYWvuthwQ==,type:str]
sops:
kms: []
gcp_kms: []
@@ -20,8 +21,8 @@ sops:
MTFUZEplYVN5RGhhMGNEcDlGbTVQcjQKktwztZAHGUqoxbGHuAg0dX5Vap+wFVfx
ku6Hzg1ZU8Lvd8ODe+4p+RvHSKVll1akgpPVuymCUxl+I6EvH7gEDA==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2023-12-12T17:44:15Z"
mac: ENC[AES256_GCM,data:bXullHomsdG80EKIVrghmPIkcQMzWX/gvM8w0iqWRbunC4SlNTzFIgrHvs1qYdyPqy+rC2NhhhWGBVSDEfAA5wRQ/xmLPmFP/z9hKsUiQqHUwZflu2taB2SLuhjMMHS2sKwcP3uPA1anPkvEjhx+IpGv9X92RHqr8YF1r2LhOVk=,iv:OQwhjxw/FI/S9pXS9/HHTFdFxIetKUPcESscfJNjkao=,tag:AhoPRZifwQVPRO38fA/LSQ==,type:str]
lastmodified: "2023-12-20T03:55:35Z"
mac: ENC[AES256_GCM,data:1jRrWtUMqsDz65bF7NBTi9UZxuEzpKgTEyUUItEk5aXEfqmE7yQpfKl6p1yGKD9BDQOlLVa4Sj9daYiTCsis8g1OxB9SIdC//LcMKhwgv/rlqRW03nvtXo9628XLyLoITBzMej0FufVM9m973h5rmFL2Lom5s+v2Bej1+bxxM3g=,iv:ZoJVYy8ImmeYHgG0SCi2KMXnPMY+Zq7KqS9D8mnZ69I=,tag:jnTi/AOfjGfhj86x964FrA==,type:str]
pgp: []
unencrypted_suffix: _unencrypted
version: 3.8.1