Mirror of https://github.com/optim-enterprises-bv/homelab.git
chore(cleanup): Cleanup after cluster rebuild
@@ -51,6 +51,8 @@ spec:
           volumeMounts:
             - name: remark42
               mountPath: /srv/var
+            - name: backup
+              mountPath: /backup
           resources:
             requests:
               cpu: 50m
@@ -62,3 +64,7 @@ spec:
         - name: remark42
           persistentVolumeClaim:
             claimName: remark42
+        - name: backup
+          nfs:
+            server: 192.168.1.55
+            path: /mnt/pool-0/backup
@@ -42,6 +42,7 @@ spec:
           envFrom:
             - configMapRef:
                 name: common-env
+                optional: true
           volumeMounts:
             - name: lidarr-config
               mountPath: /config
@@ -42,6 +42,7 @@ spec:
           envFrom:
             - configMapRef:
                 name: common-env
+                optional: true
           volumeMounts:
             - name: prowlarr-config
               mountPath: /config
@@ -42,6 +42,7 @@ spec:
           envFrom:
             - configMapRef:
                 name: common-env
+                optional: true
           volumeMounts:
             - name: radarr-config
               mountPath: /config
@@ -42,6 +42,7 @@ spec:
           envFrom:
             - configMapRef:
                 name: common-env
+                optional: true
          volumeMounts:
            - name: sonarr-config
              mountPath: /config
@@ -63,6 +63,7 @@ spec:
           envFrom:
             - configMapRef:
                 name: common-env
+                optional: true
           volumeMounts:
             - name: torrent-config
               mountPath: /config
@@ -5,7 +5,7 @@ metadata:
   namespace: jellyfin
 spec:
   storageClassName: proxmox-csi
-  volumeName: pv-jellyfin-config
+  volumeName: pv-jellyfin
   accessModes:
     - ReadWriteOnce
   resources:
@@ -5,7 +5,7 @@ metadata:
   namespace: plex
 spec:
   storageClassName: proxmox-csi
-  volumeName: pv-plex-config
+  volumeName: pv-plex
   accessModes:
     - ReadWriteOnce
   resources:
@@ -1,24 +0,0 @@
-apiVersion: gateway.networking.k8s.io/v1
-kind: Gateway
-metadata:
-  name: euclid
-  namespace: gateway
-  annotations:
-    cert-manager.io/issuer: cloudflare-issuer
-spec:
-  gatewayClassName: cilium
-  infrastructure:
-    annotations:
-      io.cilium/lb-ipam-ips: 192.168.1.220
-  listeners:
-    - protocol: HTTPS
-      port: 443
-      name: https-gateway
-      hostname: "*.euclid.stonegarden.dev"
-      tls:
-        certificateRefs:
-          - kind: Secret
-            name: cert-euclid
-      allowedRoutes:
-        namespaces:
-          from: All
@@ -6,6 +6,5 @@ resources:
   - ns.yaml
   - cloudflare-api-token.yaml
   - cloudflare-issuer.yaml
-  - gw-euclid.yaml
   - gw-stonegarden.yaml
   - gw-tls-passthrough.yaml
@@ -6,7 +6,7 @@ module "talos" {
|
||||
}
|
||||
|
||||
image = {
|
||||
version = "v1.7.6"
|
||||
version = "v1.8.0-alpha.1"
|
||||
schematic = file("${path.module}/talos/image/schematic.yaml")
|
||||
}
|
||||
|
||||
@@ -17,9 +17,9 @@ module "talos" {
|
||||
|
||||
cluster = {
|
||||
name = "talos"
|
||||
endpoint = "192.168.1.101"
|
||||
endpoint = "192.168.1.102"
|
||||
gateway = "192.168.1.1"
|
||||
talos_version = "v1.7"
|
||||
talos_version = "v1.8"
|
||||
proxmox_cluster = "homelab"
|
||||
}
|
||||
|
||||
@@ -53,15 +53,15 @@ module "talos" {
|
||||
cpu = 4
|
||||
ram_dedicated = 4096
|
||||
}
|
||||
"work-00" = {
|
||||
host_node = "abel"
|
||||
machine_type = "controlplane"
|
||||
ip = "192.168.1.110"
|
||||
mac_address = "BC:24:11:2E:A8:00"
|
||||
vm_id = 810
|
||||
cpu = 8
|
||||
ram_dedicated = 4096
|
||||
}
|
||||
# "work-00" = {
|
||||
# host_node = "abel"
|
||||
# machine_type = "worker"
|
||||
# ip = "192.168.1.110"
|
||||
# mac_address = "BC:24:11:2E:A8:00"
|
||||
# vm_id = 810
|
||||
# cpu = 8
|
||||
# ram_dedicated = 4096
|
||||
# }
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -26,3 +26,8 @@ output "talos_config" {
|
||||
value = module.talos.client_configuration.talos_config
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
output "image_schematic" {
|
||||
// "dcac6b92c17d1d8947a0cee5e0e6b6904089aa878c70d66196bb1138dbd05d1a"
|
||||
value = module.talos.schematic_id
|
||||
}
|
||||
@@ -10,7 +10,7 @@ terraform {
     }
     talos = {
       source  = "siderolabs/talos"
-      version = "0.5.0"
+      version = "0.6.0-alpha.1"
     }
     restapi = {
       source = "Mastercard/restapi"
@@ -47,8 +47,9 @@ resource "talos_machine_configuration_apply" "this" {
|
||||
|
||||
resource "talos_machine_bootstrap" "this" {
|
||||
depends_on = [talos_machine_configuration_apply.this]
|
||||
for_each = var.nodes
|
||||
node = each.value.ip
|
||||
//for_each = var.nodes
|
||||
//node = each.value.ip
|
||||
node = [for k, v in var.nodes : v.ip if v.machine_type == "controlplane"][2]
|
||||
endpoint = var.cluster.endpoint
|
||||
client_configuration = talos_machine_secrets.this.client_configuration
|
||||
}
|
||||
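Note on the hard-coded index in the bootstrap hunk above: [for k, v in var.nodes : v.ip if v.machine_type == "controlplane"][2] points the bootstrap call at the third control-plane entry, so it breaks if the nodes map ever holds fewer than three control planes. Bootstrap only needs to run against a single control-plane node, so a minimal, order-independent sketch could look like the following (the local name bootstrap_node is hypothetical and not defined anywhere in this repo):

# Sketch only: pick any one control-plane IP instead of a fixed [2] index.
# "bootstrap_node" is a hypothetical local, not part of this repository.
locals {
  control_plane_ips = [for k, v in var.nodes : v.ip if v.machine_type == "controlplane"]
  bootstrap_node    = local.control_plane_ips[0]
}

resource "talos_machine_bootstrap" "this" {
  depends_on           = [talos_machine_configuration_apply.this]
  node                 = local.bootstrap_node
  endpoint             = var.cluster.endpoint
  client_configuration = talos_machine_secrets.this.client_configuration
}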
@@ -58,6 +59,7 @@ data "talos_cluster_health" "this" {
|
||||
talos_machine_configuration_apply.this,
|
||||
talos_machine_bootstrap.this
|
||||
]
|
||||
skip_kubernetes_checks = false
|
||||
client_configuration = data.talos_client_configuration.this.client_configuration
|
||||
control_plane_nodes = [for k, v in var.nodes : v.ip if v.machine_type == "controlplane"]
|
||||
worker_nodes = [for k, v in var.nodes : v.ip if v.machine_type == "worker"]
|
||||
@@ -67,12 +69,12 @@ data "talos_cluster_health" "this" {
|
||||
}
|
||||
}
|
||||
|
||||
data "talos_cluster_kubeconfig" "this" {
|
||||
resource "talos_cluster_kubeconfig" "this" {
|
||||
depends_on = [
|
||||
talos_machine_bootstrap.this,
|
||||
data.talos_cluster_health.this
|
||||
]
|
||||
node = [for k, v in var.nodes : v.ip if v.machine_type == "controlplane"][1]
|
||||
node = [for k, v in var.nodes : v.ip if v.machine_type == "controlplane"][2]
|
||||
endpoint = var.cluster.endpoint
|
||||
client_configuration = talos_machine_secrets.this.client_configuration
|
||||
timeouts = {
|
||||
|
||||
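For context on the hunk above: with the 0.6.x provider pinned earlier in this diff, the kubeconfig moves from a data source to a managed resource, so downstream references drop the data. prefix (the outputs hunk further down makes the matching change). A minimal consumption sketch, assuming the resource above; the attribute name kubeconfig_raw is an assumption on my part and should be checked against the installed siderolabs/talos provider version:

# Sketch only: expose the raw kubeconfig from the resource declared above.
# kubeconfig_raw is an assumed attribute name; verify against the provider docs.
output "kubeconfig_raw" {
  value     = talos_cluster_kubeconfig.this.kubeconfig_raw
  sensitive = true
}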
@@ -2,11 +2,13 @@ locals {
   version      = var.image.version
   schematic    = var.image.schematic
   schematic_id = jsondecode(data.http.schematic_id.response_body)["id"]
+  #schematic_id = talos_image_factory_schematic.this.id
   image_id     = "${local.schematic_id}_${local.version}"

   update_version      = coalesce(var.image.update_version, var.image.version)
   update_schematic    = coalesce(var.image.update_schematic, var.image.schematic)
   update_schematic_id = jsondecode(data.http.updated_schematic_id.response_body)["id"]
+  #update_schematic_id = talos_image_factory_schematic.this.id
   update_image_id     = "${local.update_schematic_id}_${local.update_version}"
 }

@@ -22,6 +24,45 @@ data "http" "updated_schematic_id" {
|
||||
request_body = local.update_schematic
|
||||
}
|
||||
|
||||
/* Testing out new provider schematic feature */
|
||||
|
||||
data "talos_image_factory_extensions_versions" "this" {
|
||||
talos_version = var.image.version
|
||||
filters = {
|
||||
names = [
|
||||
"i915-ucode",
|
||||
"intel-ucode",
|
||||
"qemu-guest-agent"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
resource "talos_image_factory_schematic" "generated" {
|
||||
schematic = yamlencode(
|
||||
{
|
||||
customization = {
|
||||
systemExtensions = {
|
||||
officialExtensions = data.talos_image_factory_extensions_versions.this.extensions_info.*.name
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
output "schematic_id" {
|
||||
value = talos_image_factory_schematic.generated.id
|
||||
}
|
||||
|
||||
resource "talos_image_factory_schematic" "this" {
|
||||
schematic = local.schematic
|
||||
}
|
||||
|
||||
resource "talos_image_factory_schematic" "updated" {
|
||||
schematic = local.update_schematic
|
||||
}
|
||||
|
||||
/* Testing out new provider schematic feature */
|
||||
|
||||
resource "proxmox_virtual_environment_download_file" "this" {
|
||||
for_each = toset(distinct([for k, v in var.nodes : "${v.host_node}_${v.update == true ? local.update_image_id : local.image_id}"]))
|
||||
|
||||
|
||||
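The block above only trials the provider-side schematic: the locals hunk earlier still derives schematic_id from the data.http lookups and leaves the talos_image_factory_schematic variant commented out. A minimal sketch of what switching over would look like, reusing only names that already appear in this diff (it would replace the existing schematic_id and image_id definitions rather than sit alongside them):

# Sketch only: derive the image id from the provider-managed schematic
# instead of the data.http.schematic_id lookup.
locals {
  schematic_id = talos_image_factory_schematic.this.id
  image_id     = "${local.schematic_id}_${local.version}"
}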
@@ -8,6 +8,7 @@ output "client_configuration" {
|
||||
}
|
||||
|
||||
output "kube_config" {
|
||||
value = data.talos_cluster_kubeconfig.this
|
||||
#value = data.talos_cluster_kubeconfig.this
|
||||
value = talos_cluster_kubeconfig.this
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ terraform {
     }
     talos = {
       source  = "siderolabs/talos"
-      version = ">=0.5.0"
+      version = ">=0.6.0-alpha.1"
     }
   }
 }