Skip to content

Commit 9240159

Browse files
authored
Merge pull request #212 from bcgov/chore/pre-update-to-v3
Chore/pre update to v3
2 parents d27d94f + 18bad2b commit 9240159

File tree

9 files changed

+21
-16
lines changed

9 files changed

+21
-16
lines changed

.github/workflows/release.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ jobs:
5252
context: .
5353
dockerfile_path: Dockerfile
5454
github_token: ${{ secrets.GITHUB_TOKEN }}
55-
chart_version: 2.2.0
55+
chart_version: 2.2.1
5656
is_chart_release: true
5757
cas-airflow-dag-trigger-build:
5858
needs: [release]
@@ -65,5 +65,5 @@ jobs:
6565
context: dag-trigger
6666
dockerfile_path: dag-trigger/Dockerfile
6767
github_token: ${{ secrets.GITHUB_TOKEN }}
68-
chart_version: 1.0.20
68+
chart_version: 1.1.0
6969
is_chart_release: true

dag-trigger/airflow-dag-trigger.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ dag_config=$(echo "${2:-'e30K'}" | base64 -d) # e30K is the base64 encoding of '{}'
5151

5252
echo "Fetching state for DAG $dag_id"
5353

54-
dag_url="$AIRFLOW_ENDPOINT/api/v1/dags/${dag_id}"
54+
dag_url="$AIRFLOW_ENDPOINT/api/v2/dags/${dag_id}"
5555
is_paused=$(_curl -u "$AIRFLOW_USERNAME":"$AIRFLOW_PASSWORD" "$dag_url" | jq .is_paused)
5656

5757
if [ "$is_paused" == "true" ]; then

dags/automated_email_test_dag.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
# two DAGs triggering the email error flow
55
"""
66
from dag_configuration import default_dag_args
7-
from airflow.operators.python_operator import PythonOperator
7+
from airflow.providers.standard.operators.python import PythonOperator
88
from trigger_k8s_cronjob import trigger_k8s_cronjob
99
from datetime import datetime, timedelta
1010
from airflow import DAG
@@ -26,10 +26,10 @@
2626
DAG_ID = os.path.basename(__file__).replace(".pyc", "").replace(".py", "")
2727
SCHEDULE_INTERVAL = None # Never execute
2828

29-
dag_local_error = DAG(f'{DAG_ID}_local_error', schedule_interval=SCHEDULE_INTERVAL,
29+
dag_local_error = DAG(f'{DAG_ID}_local_error', schedule=SCHEDULE_INTERVAL,
3030
default_args=default_args)
3131

32-
dag_cronjob_error = DAG(f'{DAG_ID}_cronjob_error', schedule_interval=SCHEDULE_INTERVAL,
32+
dag_cronjob_error = DAG(f'{DAG_ID}_cronjob_error', schedule=SCHEDULE_INTERVAL,
3333
default_args=default_args)
3434

3535

dags/fetch_and_save_dag_from_github.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,13 +2,17 @@
22
from airflow.utils.dates import days_ago
33
from airflow import settings
44
from dag_configuration import default_dag_args
5-
from datetime import timedelta
5+
from datetime import datetime, timedelta
66
import urllib.request
77
import logging
88
import os
99
import time
1010

11-
@dag(default_args=default_dag_args, schedule_interval=None, start_date=days_ago(2))
11+
12+
START_DATE = datetime.now() - timedelta(days=2)
13+
14+
15+
@dag(default_args=default_dag_args, schedule=None, start_date=START_DATE)
1216
def fetch_and_save_dag_from_github(org: str = '', repo: str = '', ref: str = '', path: str = ''):
1317
"""
1418
DAG to fetch dags and store them to a disk location.

dags/walg_backups.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from airflow.operators.python_operator import PythonOperator
1+
from airflow.providers.standard.operators.python import PythonOperator
22
from exec_in_pod import exec_in_pod
33
import os
44

helm/cas-airflow-dag-trigger/Chart.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,4 +2,4 @@ apiVersion: v2
22
name: cas-airflow-dag-trigger
33
description: A Helm chart for triggering remote DAGs on an existing airflow instance
44
type: application
5-
version: 1.0.21 # Changing this requires updating the image tag in .github/workflows/release.yaml
5+
version: 1.1.0 # Changing this requires updating the image tag in .github/workflows/release.yaml

helm/cas-airflow/Chart.lock

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
dependencies:
22
- name: airflow
33
repository: https://airflow.apache.org/
4-
version: 1.15.0
4+
version: 1.18.0
55
- name: terraform-bucket-provision
66
repository: https://bcgov.github.io/cas-pipeline/
77
version: 0.1.3
8-
digest: sha256:32b75d7b556866eac5929da0878f73cd3544ce7a6565a8108dee03f99665bb62
9-
generated: "2025-05-22T09:38:09.059067491-06:00"
8+
digest: sha256:98240d16977d7eabf4897ef0be279c4e85c7f8ae6ec389376b7c28212d513798
9+
generated: "2025-09-09T14:12:54.100483261-06:00"

helm/cas-airflow/Chart.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
apiVersion: v2
22
name: cas-airflow
33
type: application
4-
version: 2.2.0 # Changing this requires updating the image tag in .github/workflows/release.yaml
4+
version: 2.2.1 # Changing this requires updating the image tag in .github/workflows/release.yaml
55
appVersion: 2.10.5 # The airflow version
66
description: Helm chart to deploy cas' flavour of airflow, compatible with OpenShift 4. This chart uses the vanilla airflow chart and adds cas' own templates and values.
77
icon: https://www.astronomer.io/static/airflowNewA.png
@@ -12,7 +12,7 @@ keywords:
1212
- bcgov
1313
dependencies:
1414
- name: airflow
15-
version: "1.15.0"
15+
version: "1.18.0"
1616
repository: "https://airflow.apache.org/"
1717
- name: terraform-bucket-provision
1818
version: "0.1.3"

helm/cas-airflow/values.yaml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -157,6 +157,7 @@ airflow:
157157
min_serialized_dag_update_interval: 10
158158
min_serialized_dag_fetch_interval: 5
159159
dags_are_paused_at_creation: False
160+
auth_manager: "airflow.providers.fab.auth_manager.fab_auth_manager.FabAuthManager"
160161
logging:
161162
# The log level should not be decreased to INFO/DEBUG,
162163
# or only temporarily, as the airflow scheduler is very verbose,
@@ -167,7 +168,7 @@ airflow:
167168
remote_base_log_folder: ~
168169
remote_log_conn_id: gcs_logs
169170
api:
170-
auth_backend: airflow.api.auth.backend.basic_auth
171+
auth_backends: airflow.api.auth.backend.basic_auth, airflow.api.auth.backend.session
171172
webserver:
172173
web_server_worker_timeout: 300
173174
workers: 2

0 commit comments

Comments
 (0)