# Workflow file for CI run of PR #30 "Feature/resource lookup and deployment"

---
# Continuous-integration workflow: lint, format-check, and test on every
# push and pull request targeting main.
name: "CI"

on:
  push:
    branches: ["main"]
  pull_request:
    branches: ["main"]

# Least-privilege default GITHUB_TOKEN: these jobs only read repo contents.
permissions:
  contents: read
jobs:
  # Fast, hermetic checks: lint, formatting, and unit tests.
  # No external services are contacted.
  ci-local:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.12"]
    steps:
      - uses: actions/checkout@v4
      - name: Install uv and set the python version
        uses: astral-sh/setup-uv@v5
        with:
          version: "0.9.16"
          python-version: ${{ matrix.python-version }}
          enable-cache: true
      - name: Setup Java JDK
        # Major-version pin (@v4) for consistency with checkout@v4 / setup-uv@v5;
        # the previous exact pin was v4.2.1, which @v4 includes.
        uses: actions/setup-java@v4
        with:
          java-version: "17"  # quoted: bare 17 parses as a YAML int, the action expects a string
          distribution: "zulu"
      - name: Install the project
        run: uv sync --locked --all-extras
      - name: Run code checks
        run: uv run ruff check
      - name: Check code formatting
        run: uv run ruff format --check
      - name: Run tests
        run: uv run pytest -v

  # Integration checks against a Databricks workspace; gated on the
  # local checks passing first.
  ci-databricks:
    needs: ci-local
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.12"]
    env:
      # Service-principal OAuth credentials, supplied via repository secrets.
      DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }}
      DATABRICKS_CLIENT_ID: ${{ secrets.DATABRICKS_CLIENT_ID }}
      DATABRICKS_CLIENT_SECRET: ${{ secrets.DATABRICKS_CLIENT_SECRET }}
    steps:
      - uses: actions/checkout@v4
      - name: Install uv and set the python version
        uses: astral-sh/setup-uv@v5
        with:
          version: "0.9.16"
          python-version: ${{ matrix.python-version }}
          enable-cache: true
      # NOTE(review): @main is a mutable ref (supply-chain risk) — prefer
      # pinning this action to a release tag or commit SHA.
      - uses: databricks/setup-cli@main
        with:
          version: "0.280.0"  # quoted so the CLI version stays a string
      - name: Install the project
        run: uv sync --locked --all-extras
      - name: Install Databricks Connect
        # Replace local pyspark with the matching Databricks Connect client.
        run: |
          uv pip uninstall pyspark
          uv pip install databricks-connect==17.2.*
      - name: Check Databricks CLI
        # Smoke-tests authentication against the workspace.
        run: databricks current-user me
      - name: Run tests
        # --no-sync preserves the databricks-connect override installed above
        # (a plain `uv run` would re-sync and reinstall pyspark).
        run: uv run --no-sync pytest -v