
Commit d3c9021

Fix variables in Chart, update tests (#23)
1 parent f609e88 commit d3c9021

File tree

19 files changed (+357, -1025 lines)


CHANGELOG.md

Lines changed: 12 additions & 0 deletions
@@ -5,6 +5,18 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [v1.3.0] - 2025-05-31
+
+### Changed
+
+- References to cert-manager-key-vault-sync in the Helm Chart are now replaced with Release.Name ([#20](https://github.com/rdvansloten/cert-manager-key-vault-sync/issues/20))
+- Log output formatting
+
+### Updated
+
+- Tests to Go `1.24.x` from `1.21.x`
+- Pinned the kreuzwerker/docker Terraform provider to stable version `3.1.2`
+
 ## [v1.2.0] - 2025-03-16
 
 ### Added

README.md

Lines changed: 4 additions & 4 deletions
@@ -7,8 +7,8 @@ Kubernetes app that syncs [cert-manager](https://cert-manager.io) Secrets to Azure Key Vault.
 
 | Component   | Version | Status |
 | ----------- | ------- | ------ |
-| Helm Chart  | v1.2.1  | ![Helm Chart](https://github.com/rdvansloten/cert-manager-key-vault-sync/actions/workflows/build-push-helm-chart.yaml/badge.svg) |
-| Application | v1.2.0  | ![Docker Image](https://github.com/rdvansloten/cert-manager-key-vault-sync/actions/workflows/build-push-image.yaml/badge.svg) |
+| Helm Chart  | v1.3.0  | ![Helm Chart](https://github.com/rdvansloten/cert-manager-key-vault-sync/actions/workflows/build-push-helm-chart.yaml/badge.svg) |
+| Application | v1.3.0  | ![Docker Image](https://github.com/rdvansloten/cert-manager-key-vault-sync/actions/workflows/build-push-image.yaml/badge.svg) |
 
 ## Features
 
@@ -39,14 +39,14 @@ export HELM_EXPERIMENTAL_OCI=1
 helm upgrade --install cert-manager-key-vault-sync \
   oci://docker.io/rdvansloten/cert-manager-key-vault-sync-chart \
   --values ./charts/cert-manager-key-vault-sync-chart/values.yaml \
-  --version v1.2.1 \
+  --version v1.3.0 \
   --namespace cert-manager-key-vault-sync --create-namespace
 ```
 
 If you wish to use raw Kubernetes manifests instead, you may render the Helm template to plain YAML using the command below.
 
 ```sh
-helm template cert-manager-key-vault-sync oci://docker.io/rdvansloten/cert-manager-key-vault-sync-chart --version v1.2.1 \
+helm template cert-manager-key-vault-sync oci://docker.io/rdvansloten/cert-manager-key-vault-sync-chart --version v1.3.0 \
   --values ./charts/cert-manager-key-vault-sync-chart/values.yaml > output.yaml
 ```

app/main.py

Lines changed: 67 additions & 50 deletions
@@ -35,22 +35,22 @@
 key_vault_name = os.getenv("AZURE_KEY_VAULT_NAME")
 key_vault_uri = f"https://{key_vault_name}.vault.azure.net/"
 use_namespaces = os.getenv("USE_NAMESPACES", "false").lower() in ("true", "1", "yes", "enabled")
-check_interval = int(os.getenv("CHECK_INTERVAL", 300))
+check_interval = int(os.getenv("CHECK_INTERVAL", "300"))
 filter_annotation = os.getenv("ANNOTATION", "cert-manager.io/certificate-name")
 certificate_name_filter = os.getenv("CERT_NAME_FILTER", "*")
 
 # GitHub version check variables
 github_repository_owner = os.getenv("GITHUB_REPO_OWNER", "rdvansloten")
 github_repository_name = os.getenv("GITHUB_REPO_NAME", "cert-manager-key-vault-sync")
 version_check_interval = os.getenv("VERSION_CHECK_INTERVAL", "86400")
-current_version = "v1.2.0"
+current_version = "v1.3.0"
 check_version = os.getenv("CHECK_VERSION", "true").lower()
 
 # Leader election variables
 lease_name = os.getenv("LEADER_ELECTION_LEASE_NAME", "cert-manager-key-vault-sync-leader")
 lease_namespace = os.getenv("POD_NAMESPACE", "cert-manager-key-vault-sync")
-lease_duration_seconds = int(os.getenv("LEASE_DURATION_SECONDS", 60))
-renew_interval_seconds = int(os.getenv("RENEW_INTERVAL_SECONDS", 60))
+lease_duration_seconds = int(os.getenv("LEASE_DURATION_SECONDS", "60"))
+renew_interval_seconds = int(os.getenv("RENEW_INTERVAL_SECONDS", "60"))
 pod_name = os.getenv("POD_NAME", "unknown")
 leader_active = True
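The `os.getenv` changes in this hunk are type-consistency fixes: `os.getenv` returns its default untouched, so the old `int(os.getenv("CHECK_INTERVAL", 300))` only worked because `int(300)` happens to be a no-op, and what `int()` received depended on whether the variable was set. With string defaults, `int()` always receives a `str`. A minimal sketch of a helper that would centralize the pattern (the `env_int` name is hypothetical, not part of this codebase):

```python
import os

def env_int(name: str, default: str) -> int:
    """Parse an integer environment variable; the default stays a string
    so int() always receives the same type."""
    raw = os.getenv(name, default)
    try:
        return int(raw)
    except ValueError as e:
        raise ValueError(f"{name} must be an integer, got {raw!r}") from e

# Usage mirroring the variables above:
check_interval = env_int("CHECK_INTERVAL", "300")
lease_duration_seconds = env_int("LEASE_DURATION_SECONDS", "60")
renew_interval_seconds = env_int("RENEW_INTERVAL_SECONDS", "60")
```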

@@ -59,37 +59,46 @@
 logging.info(f"Using Key Vault: {key_vault_uri}")
 logging.info(f"Using Namespace separation: {str(use_namespaces).lower()}")
 logging.info(f"Using certificate name filter: {certificate_name_filter}")
-logging.info(f"Using annotation filter: {filter_annotation}")
+logging.info("Using annotation filter: %s", filter_annotation)
 logging.info(f"Using version check interval: {version_check_interval}")
 logging.info(f"Using GitHub version check: {check_version}")
 
-# Initialize Key Vault client
-try:
-    credential = DefaultAzureCredential(exclude_interactive_browser_credential=False, additionally_allowed_tenants="*")
-    certificate_client = CertificateClient(vault_url=key_vault_uri, credential=credential)
+# Initialize Kubernetes client (in-cluster config)
+config.load_incluster_config()
+k8s_client = client.CoreV1Api()
 
-    logging.info("Detected Key Vault Certificates:")
-    for cert in certificate_client.list_properties_of_certificates():
-        logging.info(f"- {cert.name}")
+# Azure credential and Key Vault client will be initialized after leadership is acquired
+credential = None
+certificate_client = None
 
-    logging.info(f"Initialized Azure Key Vault client using Key Vault '{key_vault_name}'.")
+def init_key_vault_client():
+    '''Initialize the Azure Key Vault client using DefaultAzureCredential.'''
+    global credential, certificate_client
+    # Lazy initialization if not already done
+    if certificate_client is not None:
+        return
 
-except ResourceNotFoundError as e:
-    logging.error(f"Failed to connect to Key Vault '{key_vault_name}': {str(e)}")
-    raise
+    credential = DefaultAzureCredential(exclude_interactive_browser_credential=False, additionally_allowed_tenants="*")
+    certificate_client = CertificateClient(vault_url=key_vault_uri, credential=credential)
 
-except ServiceRequestError as e:
-    logging.error(f"Failed to connect to Key Vault '{key_vault_name}': {str(e)}")
-    raise
+    try:
+        logging.info("Detected Key Vault Certificates:")
+        for cert in certificate_client.list_properties_of_certificates():
+            logging.info(cert.name)
 
-except Exception as e:
-    logging.error(f"Failed to connect to Key Vault '{key_vault_name}': {str(e)}")
-    raise
+        logging.info(f"Initialized Azure Key Vault client using Key Vault '{key_vault_name}'.")
 
-# Initialize Kubernetes client (in-cluster config)
-config.load_incluster_config()
-k8s_client = client.CoreV1Api()
+    except ResourceNotFoundError as e:
+        logging.error(f"Failed to connect to Key Vault '{key_vault_name}': {str(e)}")
+        raise
+
+    except ServiceRequestError as e:
+        logging.error(f"Failed to connect to Key Vault '{key_vault_name}': {str(e)}")
+        raise
+
+    except Exception as e:
+        logging.error(f"Failed to connect to Key Vault '{key_vault_name}': {str(e)}")
+        raise
 
 # Leader election functions
 def get_lease(api):
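Note the one logging call that switches from an f-string to %-style arguments: `logging` interpolates %-style parameters lazily, only if the record actually passes the level filter, whereas an f-string is built eagerly on every call. A two-line illustration:

```python
import logging

filter_annotation = "cert-manager.io/certificate-name"

# Eager: the f-string is formatted even when DEBUG records are filtered out.
logging.debug(f"Using annotation filter: {filter_annotation}")

# Lazy: logging interpolates %s only if this record is actually emitted.
logging.info("Using annotation filter: %s", filter_annotation)
```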
@@ -114,24 +123,23 @@ def create_lease(api):
     )
     try:
         created = api.create_namespaced_lease(lease_namespace, lease)
-        logging.info(f"[{pod_name}] Created lease; acquired leadership.")
+        logging.info(f"Pod {pod_name} created lease; acquired leadership.")
         return created
     except client.exceptions.ApiException as e:
-        logging.error(f"[{pod_name}] Error creating lease: {e}")
+        logging.error(f"Pod {pod_name} could not create a lease: {e}")
         return None
 
-
 def try_acquire_leadership(api):
     now = datetime.datetime.now(datetime.timezone.utc)
+    # Fetch current Lease (or None if it doesn’t exist)
     lease = get_lease(api)
     if lease is None:
+        # Try to create it (first comers win)
         lease = create_lease(api)
-        if lease is not None:
-            return True
-        else:
-            return False
+        return True if lease is not None else False
 
     spec = lease.spec
+    # Determine if the existing lease has expired
     if spec.renew_time is None:
         expired = True
     else:
@@ -140,23 +148,32 @@ def try_acquire_leadership(api):
         last_renew = datetime.datetime.fromisoformat(last_renew.replace("Z", "+00:00"))
         expired = (now - last_renew).total_seconds() > spec.lease_duration_seconds
 
+    # If we already hold it or it’s expired, try to take it
     if spec.holder_identity == pod_name or expired:
         lease.spec.holder_identity = pod_name
         lease.spec.acquire_time = now
         lease.spec.renew_time = now
         lease.spec.lease_duration_seconds = lease_duration_seconds
+
         try:
             api.replace_namespaced_lease(lease_name, lease_namespace, lease)
-            logging.info(f"[{pod_name}] Acquired/renewed leadership.")
+            logging.info(f"Pod {pod_name} acquired/renewed leadership.")
             return True
+
         except client.exceptions.ApiException as e:
-            logging.error(f"[{pod_name}] Failed to update lease: {e}")
-            return False
+            if e.status == 409:
+                # Another pod updated the lease first—just back off
+                logging.debug(f"Pod {pod_name} had a lease update conflict; leadership held elsewhere.")
+                return False
+            else:
+                logging.error(f"Pod {pod_name} has failed to update lease: {e}")
+                return False
+
     else:
-        logging.debug(f"[{pod_name}] Leadership held by {spec.holder_identity}.")
+        # Someone else still holds a valid lease
+        logging.debug(f"Leadership held by {spec.holder_identity}.")
         return False
 
-
 def renew_leadership(api):
     global leader_active
     while leader_active:
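Two things happen in this hunk: the expiry test that gates a takeover, and a new 409 branch that treats a write conflict as "another pod won the race" rather than as an error. The expiry test itself is one line of date arithmetic; a self-contained sketch with illustrative values (note the `replace("Z", "+00:00")`, since `datetime.fromisoformat` on older Python versions rejects a trailing `Z`):

```python
import datetime

lease_duration_seconds = 60
renew_time = "2025-05-31T12:00:00Z"  # as serialized on a coordination.k8s.io Lease

now = datetime.datetime.now(datetime.timezone.utc)
last_renew = datetime.datetime.fromisoformat(renew_time.replace("Z", "+00:00"))

# Stale once the holder has gone longer than the lease duration without renewing.
expired = (now - last_renew).total_seconds() > lease_duration_seconds
print(f"lease expired: {expired}")
```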
@@ -165,18 +182,18 @@
        try:
            lease = get_lease(api)
            if lease is None:
-               logging.error(f"[{pod_name}] Lease not found.")
+               logging.error(f"Lease not found for Pod {pod_name}.")
                leader_active = False
                break
            if lease.spec.holder_identity != pod_name:
-               logging.error(f"[{pod_name}] Leadership lost (current leader: {lease.spec.holder_identity}).")
+               logging.error(f"Pod {pod_name} has lost leadership. (current leader: {lease.spec.holder_identity}).")
                leader_active = False
                break
            lease.spec.renew_time = now
            api.replace_namespaced_lease(lease_name, lease_namespace, lease)
-           logging.info(f"[{pod_name}] Renewed leadership at {now.isoformat()}.")
+           logging.info(f"Pod {pod_name} has renewed leadership at {now.isoformat()}.")
        except client.exceptions.ApiException as e:
-           logging.error(f"[{pod_name}] Error renewing lease: {e}")
+           logging.error(f"Pod {pod_name} had an error renewing lease: {e}")
            leader_active = False
            break

@@ -372,28 +389,29 @@ def periodic_check():
 def main():
     global leader_active
     logging.info("Starting cert-manager-key-vault-sync process.")
-    schedule_version_check()
-    load_initial_state()
-
-    # Start Prometheus metrics server on port 8000
-    start_http_server(8000)
-    logging.info("Prometheus metrics server started on port 8000")
 
     coordination_api = client.CoordinationV1Api()
     while True:
         if try_acquire_leadership(coordination_api):
             threading.Thread(target=renew_leadership, args=(coordination_api,), daemon=True).start()
-            logging.info(f"Acquired leadership as {pod_name}. Starting sync loop.")
+            logging.info(f"Pod {pod_name} acquired leadership. Starting sync loop.")
+            init_key_vault_client()
+            start_http_server(8000)
+            logging.info("Prometheus metrics server started on port 8000")
             break
         else:
-            logging.debug(f"Not the leader, retrying in {renew_interval_seconds} seconds.")
+            logging.debug(f"This Pod ({pod_name}) is not the leader, retrying in {renew_interval_seconds} seconds.")
             time.sleep(renew_interval_seconds)
 
     # Only run the following if this replica is the leader.
     while leader_active:
         sync_total.inc()
         sync_start = time.time()
         try:
+            schedule_version_check()
+            load_initial_state()
+
+            # Start Prometheus metrics server on port 8000
             sync_k8s_secrets_to_key_vault()
             sync_success_total.inc()
         except Exception as e:
@@ -409,6 +427,5 @@ def main():
         logging.error("Leadership lost, exiting process.")
         exit(1)
 
-
 if __name__ == "__main__":
     main()
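Taken together, the `main()` changes reorder startup so that Azure authentication and the metrics endpoint belong to the leader alone: followers loop on the lease and never touch Key Vault. A runnable, hedged sketch of that control flow, with stubs standing in for the real functions above:

```python
import threading
import time

renew_interval_seconds = 5

# Stubs for the functions defined in app/main.py, so this flow runs standalone.
def try_acquire_leadership(api):
    return True  # pretend this pod wins the lease immediately

def renew_leadership(api):
    pass  # background renewal loop

def init_key_vault_client():
    print("leader: initializing Azure Key Vault client")

def start_http_server(port):
    print(f"leader: Prometheus metrics on port {port}")

coordination_api = None  # would be client.CoordinationV1Api() in-cluster

# Followers spin here; only the winner falls through to Azure auth and metrics.
while True:
    if try_acquire_leadership(coordination_api):
        threading.Thread(target=renew_leadership,
                         args=(coordination_api,), daemon=True).start()
        init_key_vault_client()
        start_http_server(8000)
        break
    time.sleep(renew_interval_seconds)
```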

app/requirements.txt

Lines changed: 6 additions & 6 deletions
@@ -1,6 +1,6 @@
-azure-identity==1.*
-azure-keyvault-certificates==4.*
-kubernetes==30.*
-requests==2.*
-packaging==24.*
-prometheus-client==0.*
+azure-identity==1.23.*
+azure-keyvault-certificates==4.9.*
+kubernetes==32.0.*
+requests==2.32.*
+packaging==25.*
+prometheus-client==0.22.*
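The pins tighten from major-level wildcards to minor-level ones, so `pip` can still pick up patch releases but not a surprise minor bump. Since `packaging` is itself on this list, a quick sketch of what the new specifiers accept (version numbers are illustrative):

```python
from packaging.specifiers import SpecifierSet

new_pin = SpecifierSet("==2.32.*")  # the new requests pin
print("2.32.3" in new_pin)   # True:  patch releases still match
print("2.33.0" in new_pin)   # False: minor bumps no longer slip through

old_pin = SpecifierSet("==2.*")     # the old pin
print("2.33.0" in old_pin)   # True:  any 2.x release was accepted
```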

charts/cert-manager-key-vault-sync-chart/Chart.yaml

Lines changed: 2 additions & 2 deletions

@@ -1,5 +1,5 @@
 apiVersion: v2
 name: cert-manager-key-vault-sync-chart
 description: A Helm chart for the cert-manager-key-vault-sync application.
-version: v1.2.1
-appVersion: v1.2.0
+version: v1.3.0
+appVersion: v1.3.0

charts/cert-manager-key-vault-sync-chart/templates/clusterrole.yaml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 apiVersion: rbac.authorization.k8s.io/v1
 kind: ClusterRole
 metadata:
-  name: cert-manager-key-vault-sync
+  name: {{ .Release.Name | default "cert-manager-key-vault-sync" }}
 rules:
   - apiGroups: [""]
     resources: ["secrets"]
charts/cert-manager-key-vault-sync-chart/templates/clusterrolebinding.yaml

Lines changed: 3 additions & 3 deletions

@@ -1,12 +1,12 @@
 apiVersion: rbac.authorization.k8s.io/v1
 kind: ClusterRoleBinding
 metadata:
-  name: cert-manager-key-vault-sync
+  name: {{ .Release.Name | default "cert-manager-key-vault-sync" }}
 subjects:
   - kind: ServiceAccount
-    name: cert-manager-key-vault-sync
+    name: {{ .Release.Name | default "cert-manager-key-vault-sync" }}
     namespace: {{ .Release.Namespace }}
 roleRef:
   kind: ClusterRole
-  name: cert-manager-key-vault-sync
+  name: {{ .Release.Name | default "cert-manager-key-vault-sync" }}
   apiGroup: rbac.authorization.k8s.io

charts/cert-manager-key-vault-sync-chart/templates/deployment.yaml

Lines changed: 4 additions & 4 deletions
@@ -1,11 +1,11 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: cert-manager-key-vault-sync
+  name: {{ .Release.Name | default "cert-manager-key-vault-sync" }}
   namespace: {{ .Release.Namespace }}
   labels:
-    app.kubernetes.io/name: cert-manager-key-vault-sync
-    app.kubernetes.io/instance: cert-manager-key-vault-sync
+    app.kubernetes.io/name: {{ .Release.Name | default "cert-manager-key-vault-sync" }}
+    app.kubernetes.io/instance: {{ .Release.Name | default "cert-manager-key-vault-sync" }}
     app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
     app.kubernetes.io/managed-by: {{ .Release.Service }}
     helm.sh/chart: {{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }}
@@ -22,7 +22,7 @@ spec:
         app.kubernetes.io/name: cert-manager-key-vault-sync
         app.kubernetes.io/instance: cert-manager-key-vault-sync
     spec:
-      serviceAccountName: cert-manager-key-vault-sync
+      serviceAccountName: {{ .Release.Name | default "cert-manager-key-vault-sync" }}
      containers:
        - name: cert-manager-key-vault-sync
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}"

charts/cert-manager-key-vault-sync-chart/templates/service.yaml

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 apiVersion: v1
 kind: Service
 metadata:
-  name: cert-manager-key-vault-sync
+  name: {{ .Release.Name | default "cert-manager-key-vault-sync" }}
   namespace: {{ .Release.Namespace }}
   labels:
     app.kubernetes.io/name: cert-manager-key-vault-sync

charts/cert-manager-key-vault-sync-chart/templates/serviceaccount.yaml

Lines changed: 2 additions & 2 deletions
@@ -8,8 +8,8 @@ metadata:
     azure.workload.identity/tenant-id: {{ .Values.azure.workloadIdentity.tenantId }}
   labels:
     azure.workload.identity/use: "true"
-    app.kubernetes.io/name: {{ .Release.Name }}
-    app.kubernetes.io/instance: {{ .Release.Name }}
+    app.kubernetes.io/name: cert-manager-key-vault-sync
+    app.kubernetes.io/instance: cert-manager-key-vault-sync
     app.kubernetes.io/managed-by: {{ .Release.Service }}
 {{- if .Values.imagePullSecrets }}
 imagePullSecrets:
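With the ClusterRole, ClusterRoleBinding, Deployment, and Service names now derived from `.Release.Name` (falling back to `cert-manager-key-vault-sync`), two releases of the chart can coexist without name collisions. A hedged way to spot-check the rendering from a local checkout; it assumes `helm` is on the PATH, the chart path matches the README, and the release name `my-sync` is arbitrary:

```python
import subprocess

# Render the chart under a custom release name, then confirm the
# templated resources pick it up.
out = subprocess.run(
    ["helm", "template", "my-sync",
     "./charts/cert-manager-key-vault-sync-chart"],
    capture_output=True, text=True, check=True,
).stdout

print("name: my-sync" in out)  # expected True for the templated resources
# Hard-coded strings remain, e.g. the container name and selector labels:
print("cert-manager-key-vault-sync" in out)  # still True
```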
