
vault: Prepare template to expose rpc service (and ingress)

Antoine R. Dumont requested to merge add-vault-rpc-template into production

This:

  • [main] adds a template for the vault rpc server (& ingress), matching what was done for the scheduler rpc (& ingress); see the sketch after this list
  • toolbox: adds the vault configuration so the db model can be checked or migrated from the toolbox
  • cookers: adapts the cooker configmap to reuse the vault configuration (aligning it with the other templates)
  • staging: prepares the vault template so the rpc (& ingress) can be migrated to the dynamic infrastructure (nothing is actually migrated yet)
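For illustration only, a minimal sketch of the kind of values entry the vault rpc (& ingress) template could expose, mirroring the scheduler rpc layout. The key names and structure are assumptions, not the chart's actual schema; only the host and port are taken from the staging diff below.

    # Hypothetical values excerpt -- key layout mirrors the scheduler rpc entry
    # and is an assumption, not the chart's actual schema.
    vault:
      enabled: false            # staging only prepares the template for now
      rpc:
        port: 5005              # port used by the vault rpc service (from the diff)
        ingress:
          enabled: false
          host: vault.internal.staging.swh.network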

Annex: this also reworks the ingress part to reuse the shared list of authorized internal network ranges (across ingress services), hence the slight change to the scheduler rpc ingresses visible in the diff.
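As a hedged illustration of that refactoring, a named template could build the whitelist annotation from a single shared list of ranges. The helper name and the internalNetworkRanges values key are hypothetical; the cluster CIDRs and the sorted output format come from the diff below.

    {{/* Hypothetical helper (_helpers.tpl): name and values key are assumptions,
         shown only to illustrate sharing the authorized internal ranges. */}}
    {{- define "swh.ingress.internalWhitelist" -}}
    {{- $ranges := concat (list "10.42.0.0/16" "10.43.0.0/16" "127.0.0.0/8") .Values.internalNetworkRanges -}}
    nginx.ingress.kubernetes.io/whitelist-source-range: {{ join "," (sortAlpha $ranges) }}
    {{- end -}}

An ingress template would then render the annotation with something like {{ include "swh.ingress.internalWhitelist" . }} under metadata.annotations, which is why every scheduler rpc ingress picks up the same (now sorted) range list in the diff.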

Tests are happy:

  • make minikube
  • make swh-helm-diff
make swh-helm-diff
[swh] Comparing changes between branches production and add-vault-rpc-template (per environment)...
Switched to branch 'production'
Your branch is up to date with 'origin/production'.
[swh] Generate config in production branch for environment staging...
Switched to branch 'add-vault-rpc-template'
[swh] Generate config in add-vault-rpc-template branch for environment staging...
Switched to branch 'production'
Your branch is up to date with 'origin/production'.
[swh] Generate config in production branch for environment production...
Switched to branch 'add-vault-rpc-template'
[swh] Generate config in add-vault-rpc-template branch for environment production...


------------- diff for environment staging -------------

--- /tmp/swh-chart.swh.kO0kfiHH/staging.before  2023-10-13 09:52:30.201051533 +0200
+++ /tmp/swh-chart.swh.kO0kfiHH/staging.after   2023-10-13 09:52:30.765050279 +0200
@@ -285,31 +285,32 @@
     fi
 ---
 # Source: swh/templates/cookers/configmap.yaml
 apiVersion: v1
 kind: ConfigMap
 metadata:
   name: cooker-batch-template
   namespace: swh
 data:
   config.yml.template: |
+
     storage:
       cls: pipeline
       steps:
       - cls: retry
       - cls: remote
         url: http://storage1.internal.staging.swh.network:5002
-
     vault:
       cls: remote
       url: http://vault.internal.staging.swh.network:5005/
     max_bundle_size: 1073741824
+
     celery:
       task_broker: amqp://swhconsumer:${AMQP_PASSWORD}@scheduler0.internal.staging.swh.network:5672/%2f
       task_modules:
         - swh.vault.cooking_tasks
       task_queues:
       - swh.vault.cooking_tasks.SWHBatchCookingTask

       sentry_settings_for_celery_tasks:
         __sentry-settings-for-celery-tasks__
   init-container-entrypoint.sh: |
@@ -368,31 +369,32 @@
       - console
 ---
 # Source: swh/templates/cookers/configmap.yaml
 apiVersion: v1
 kind: ConfigMap
 metadata:
   name: cooker-simple-template
   namespace: swh
 data:
   config.yml.template: |
+
     storage:
       cls: pipeline
       steps:
       - cls: retry
       - cls: remote
         url: http://storage1.internal.staging.swh.network:5002
-
     vault:
       cls: remote
       url: http://vault.internal.staging.swh.network:5005/
     max_bundle_size: 1073741824
+
     celery:
       task_broker: amqp://swhconsumer:${AMQP_PASSWORD}@scheduler0.internal.staging.swh.network:5672/%2f
       task_modules:
         - swh.vault.cooking_tasks
       task_queues:
       - swh.vault.cooking_tasks.SWHCookingTask

       sentry_settings_for_celery_tasks:
         __sentry-settings-for-celery-tasks__
   init-container-entrypoint.sh: |
@@ -3986,20 +3988,32 @@
 kind: ConfigMap
 metadata:
   name: toolbox-storage-template
   namespace: swh
 data:
   config.yml.template: |
     storage:
       cls: postgresql
       db: host=db1.internal.staging.swh.network port=5432 user=swh dbname=swh password=${POSTGRESQL_PASSWORD}
 ---
+# Source: swh/templates/toolbox/configmap.yaml
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: toolbox-vault-template
+  namespace: swh
+data:
+  config.yml.template: |
+    vault:
+      cls: postgresql
+      db: host=db1.internal.staging.swh.network port=5432 user=swh-vault dbname=swh password=${POSTGRESQL_PASSWORD}
+---
 # Source: swh/templates/toolbox/script-utils-configmap.yaml
 apiVersion: v1
 kind: ConfigMap
 metadata:
   name: toolbox-script-utils
   namespace: swh
 data:
   register-task-types.sh: |
     #!/bin/bash

@@ -4106,20 +4120,35 @@
     set -eu

     /opt/swh/bin/check-db-version.sh storage

   migrate-storage-db-version.sh: |
     #!/bin/bash

     set -eu

     /opt/swh/bin/migrate-db-version.sh storage
+
+
+  check-vault-db-version.sh: |
+    #!/bin/bash
+
+    set -eu
+
+    /opt/swh/bin/check-db-version.sh vault
+
+  migrate-vault-db-version.sh: |
+    #!/bin/bash
+
+    set -eu
+
+    /opt/swh/bin/migrate-db-version.sh vault
 ---
 # Source: swh/templates/utils/database-utils.yaml
 apiVersion: v1
 kind: ConfigMap
 metadata:
   name: database-utils
   namespace: swh
 data:
   init-keyspace.py: |
     from swh.core import config
@@ -14338,21 +14367,21 @@
   strategy:
     type: RollingUpdate
     rollingUpdate:
       maxSurge: 1
   template:
     metadata:
       labels:
         app: cooker-batch
       annotations:
         # Force a rollout upgrade if the configuration changes
-        checksum/config: 0b4befd9996ad5b8b2d8eb873dcafda289d10242f273ce99129e48e46d9fd32d
+        checksum/config: b8640eee471339fd8888636ddfa22e03e3ee34ef3509ab0577c1396f6663f163
     spec:
       affinity:

         nodeAffinity:
           requiredDuringSchedulingIgnoredDuringExecution:
             nodeSelectorTerms:
             - matchExpressions:
               - key: swh/cooker
                 operator: In
                 values:
@@ -14481,21 +14510,21 @@
   strategy:
     type: RollingUpdate
     rollingUpdate:
       maxSurge: 1
   template:
     metadata:
       labels:
         app: cooker-simple
       annotations:
         # Force a rollout upgrade if the configuration changes
-        checksum/config: 0b4befd9996ad5b8b2d8eb873dcafda289d10242f273ce99129e48e46d9fd32d
+        checksum/config: b8640eee471339fd8888636ddfa22e03e3ee34ef3509ab0577c1396f6663f163
     spec:
       affinity:

         nodeAffinity:
           requiredDuringSchedulingIgnoredDuringExecution:
             nodeSelectorTerms:
             - matchExpressions:
               - key: swh/cooker
                 operator: In
                 values:
@@ -21554,22 +21583,22 @@
   strategy:
     type: RollingUpdate
     rollingUpdate:
       maxSurge: 1
   template:
     metadata:
       labels:
         app: swh-toolbox
       annotations:
         # Force a rollout upgrade if the configuration changes
-        checksum/config: ca0e0cc4f29948e07bfadf33826325a806b0219ab27ee861f1d01ebfd208d57b
-        checksum/configScript: 9558d6f3f158a27f17b6d6083a22e29f688e4c41a8332226cb246539b4b12eb0
+        checksum/config: 49c3d4f85d16ac7c5e94dec0679b8eb93a0e7d2e6952422c7e755a96beb27687
+        checksum/configScript: 220368f672b001841f8026d9a30487b10b31c0ce2229bd60477924fe814ae067
     spec:
       priorityClassName: swh-tools

       initContainers:
         - name: prepare-configuration-scheduler
           image: debian:bullseye
           imagePullPolicy: IfNotPresent
           command:
           - /bin/bash
           args:
@@ -21634,20 +21663,42 @@
               secretKeyRef:
                 name: swh-postgresql-common-secret
                 key: postgres-swh-password
                 # 'name' secret must exist & include that ^ key
                 optional: false
           volumeMounts:
           - name: configuration
             mountPath: /etc/swh
           - name: configuration-storage-template
             mountPath: /etc/swh/configuration-template
+        - name: prepare-configuration-vault
+          image: debian:bullseye
+          imagePullPolicy: IfNotPresent
+          command:
+          - /bin/bash
+          args:
+          - -c
+          - eval echo "\"$(</etc/swh/configuration-template/config.yml.template)\"" > /etc/swh/config-vault.yml
+          env:
+
+          - name: POSTGRESQL_PASSWORD
+            valueFrom:
+              secretKeyRef:
+                name: swh-vault-postgresql-secret
+                key: postgres-swh-vault-password
+                # 'name' secret must exist & include that ^ key
+                optional: false
+          volumeMounts:
+          - name: configuration
+            mountPath: /etc/swh
+          - name: configuration-vault-template
+            mountPath: /etc/swh/configuration-template
       containers:
       - name: swh-toolbox
         image: container-registry.softwareheritage.org/swh/infra/swh-apps/toolbox:20231012.1
         imagePullPolicy: IfNotPresent
         resources:
           requests:
             memory: 256Mi
             cpu: 250m
         command:
         - /bin/bash
@@ -21681,20 +21732,28 @@
             path: "config.yml.template"

       - name: configuration-storage-template
         configMap:
           name: toolbox-storage-template
           defaultMode: 0777
           items:
           - key: "config.yml.template"
             path: "config.yml.template"

+      - name: configuration-vault-template
+        configMap:
+          name: toolbox-vault-template
+          defaultMode: 0777
+          items:
+          - key: "config.yml.template"
+            path: "config.yml.template"
+
       - name: toolbox-script-utils
         configMap:
           name: toolbox-script-utils
           defaultMode: 0555
 ---
 # Source: swh/templates/scheduler/update-metrics-cronjob.yaml
 apiVersion: batch/v1
 kind: CronJob
 metadata:
   name: scheduler-update-metrics-cronjob
@@ -21809,21 +21868,21 @@
               number: 5013
     host: graphql.internal.staging.swh.network
 ---
 # Source: swh/templates/scheduler/rpc-ingress.yaml
 apiVersion: networking.k8s.io/v1
 kind: Ingress
 metadata:
   namespace: swh
   name: scheduler-rpc-ingress-default
   annotations:
-    nginx.ingress.kubernetes.io/whitelist-source-range: 192.168.130.0/24,10.42.0.0/16,10.43.0.0/16
+    nginx.ingress.kubernetes.io/whitelist-source-range: 10.42.0.0/16,10.43.0.0/16,127.0.0.0/8,192.168.130.0/24
     nginx.ingress.kubernetes.io/proxy-body-size: 4G
     nginx.ingress.kubernetes.io/proxy-connect-timeout: "90"
     nginx.ingress.kubernetes.io/proxy-read-timeout: "3600"
     nginx.ingress.kubernetes.io/proxy-request-buffering: "on"
     nginx.ingress.kubernetes.io/proxy-send-timeout: "90"

 spec:
   rules:
   - host: scheduler.internal.staging.swh.network
     http:
@@ -21836,21 +21895,21 @@
             port:
               number: 5008
 ---
 # Source: swh/templates/scheduler/rpc-ingress.yaml
 apiVersion: networking.k8s.io/v1
 kind: Ingress
 metadata:
   namespace: swh
   name: scheduler-rpc-ingress-read-only
   annotations:
-    nginx.ingress.kubernetes.io/whitelist-source-range: 192.168.130.0/24,10.42.0.0/16,10.43.0.0/16,192.168.101.0/24
+    nginx.ingress.kubernetes.io/whitelist-source-range: 10.42.0.0/16,10.43.0.0/16,127.0.0.0/8,192.168.101.0/24,192.168.130.0/24
     nginx.ingress.kubernetes.io/proxy-body-size: 4G
     nginx.ingress.kubernetes.io/proxy-connect-timeout: "90"
     nginx.ingress.kubernetes.io/proxy-read-timeout: "3600"
     nginx.ingress.kubernetes.io/proxy-request-buffering: "on"
     nginx.ingress.kubernetes.io/proxy-send-timeout: "90"

 spec:
   rules:
   - host: scheduler.internal.staging.swh.network
     http:


------------- diff for environment production -------------

--- /tmp/swh-chart.swh.kO0kfiHH/production.before       2023-10-13 09:52:31.113049505 +0200
+++ /tmp/swh-chart.swh.kO0kfiHH/production.after        2023-10-13 09:52:31.465048723 +0200
@@ -24454,21 +24454,21 @@
               number: 5013
     host: graphql.internal.softwareheritage.org
 ---
 # Source: swh/templates/scheduler/rpc-ingress.yaml
 apiVersion: networking.k8s.io/v1
 kind: Ingress
 metadata:
   namespace: swh
   name: scheduler-rpc-ingress-default
   annotations:
-    nginx.ingress.kubernetes.io/whitelist-source-range: 192.168.100.0/24,192.168.200.0/22,10.42.0.0/16,10.43.0.0/16
+    nginx.ingress.kubernetes.io/whitelist-source-range: 10.42.0.0/16,10.43.0.0/16,127.0.0.0/8,192.168.100.0/24,192.168.200.0/22
     nginx.ingress.kubernetes.io/proxy-body-size: 4G
     nginx.ingress.kubernetes.io/proxy-connect-timeout: "90"
     nginx.ingress.kubernetes.io/proxy-read-timeout: "3600"
     nginx.ingress.kubernetes.io/proxy-request-buffering: "on"
     nginx.ingress.kubernetes.io/proxy-send-timeout: "90"

 spec:
   rules:
   - host: scheduler.internal.softwareheritage.org
     http:
@@ -24481,21 +24481,21 @@
             port:
               number: 5008
 ---
 # Source: swh/templates/scheduler/rpc-ingress.yaml
 apiVersion: networking.k8s.io/v1
 kind: Ingress
 metadata:
   namespace: swh
   name: scheduler-rpc-ingress-read-only
   annotations:
-    nginx.ingress.kubernetes.io/whitelist-source-range: 192.168.100.0/24,192.168.200.0/22,10.42.0.0/16,10.43.0.0/16,192.168.101.0/24
+    nginx.ingress.kubernetes.io/whitelist-source-range: 10.42.0.0/16,10.43.0.0/16,127.0.0.0/8,192.168.100.0/24,192.168.101.0/24,192.168.200.0/22
     nginx.ingress.kubernetes.io/proxy-body-size: 4G
     nginx.ingress.kubernetes.io/proxy-connect-timeout: "90"
     nginx.ingress.kubernetes.io/proxy-read-timeout: "3600"
     nginx.ingress.kubernetes.io/proxy-request-buffering: "on"
     nginx.ingress.kubernetes.io/proxy-send-timeout: "90"

 spec:
   rules:
   - host: scheduler.internal.softwareheritage.org
     http:

Refs. swh/infra/sysadm-environment#5058 (closed)

