
Commit 343e13d

WIP - deploying without terraform
1 parent e202664 commit 343e13d

49 files changed, +1664 −64 lines changed

.github/workflows/push.yml

Lines changed: 13 additions & 0 deletions

@@ -42,15 +42,24 @@ jobs:
           - macos-latest
           - ubuntu-latest
           - windows-latest
+        deployment:
+          - "terraform"
+          - "direct"
 
     steps:
       - name: Checkout repository and submodules
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
+      - name: Create deployment-specific cache identifier
+        run: echo "${{ matrix.deployment }}" > deployment-type.txt
+
       - name: Setup Go
         uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
         with:
           go-version-file: go.mod
+          cache-dependency-path: |
+            go.sum
+            deployment-type.txt
 
       - name: Setup Python
         uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0

@@ -72,11 +81,15 @@ jobs:
         # and would like to run the tests as fast as possible. We run it on schedule as well, because that is what
         # populates the cache and cache may include test results.
         if: ${{ github.event_name == 'pull_request' || github.event_name == 'schedule' }}
+        env:
+          DATABRICKS_CLI_DEPLOYMENT: ${{ matrix.deployment }}
         run: make test
 
       - name: Run tests with coverage
         # Still run 'make cover' on push to main and merge checks to make sure it does not get broken.
         if: ${{ github.event_name != 'pull_request' && github.event_name != 'schedule' }}
+        env:
+          DATABRICKS_CLI_DEPLOYMENT: ${{ matrix.deployment }}
         run: make cover
 
       - name: Analyze slow tests
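
The new matrix axis changes both what the tests exercise (via DATABRICKS_CLI_DEPLOYMENT) and how the Go build cache is keyed: actions/setup-go hashes the files listed under cache-dependency-path, so writing the matrix value into deployment-type.txt keeps the terraform and direct legs from sharing one cache. A rough sketch of that keying idea in Python (illustrative only; the real key also folds in OS and Go version, and the hashing details here are not setup-go's actual implementation):

```python
import hashlib

def cache_key(file_contents: dict[str, bytes], prefix: str = "setup-go") -> str:
    # Illustrative only: derive a cache key from a hash over the dependency files.
    digest = hashlib.sha256()
    for name in sorted(file_contents):
        digest.update(file_contents[name])
    return f"{prefix}-{digest.hexdigest()[:16]}"

base = {"go.sum": b"...module hashes..."}
print(cache_key({**base, "deployment-type.txt": b"terraform\n"}))
print(cache_key({**base, "deployment-type.txt": b"direct\n"}))  # different key, separate cache
```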

acceptance/acceptance_test.go

Lines changed: 31 additions & 2 deletions

@@ -36,6 +36,8 @@ import (
     "github.com/stretchr/testify/require"
 )
 
+const deploymentEnvVar = "DATABRICKS_CLI_DEPLOYMENT"
+
 var (
     KeepTmp bool
     NoRepl bool

@@ -214,6 +216,15 @@ func testAccept(t *testing.T, inprocessMode bool, singleTest string) int {
     totalDirs := 0
     selectedDirs := 0
 
+    deploymentFilter, hasFilter := os.LookupEnv(deploymentEnvVar)
+    filterEnv := ""
+    if hasFilter {
+        filterEnv = deploymentEnvVar + "=" + deploymentFilter
+
+        // Clear it just to be sure, since it's going to be part of os.Environ() and we're going to add a different value based on settings.
+        t.Setenv(deploymentEnvVar, "")
+    }
+
     for _, dir := range testDirs {
         totalDirs += 1
 

@@ -245,15 +256,15 @@ func testAccept(t *testing.T, inprocessMode bool, singleTest string) int {
             if len(expanded[0]) > 0 {
                 t.Logf("Running test with env %v", expanded[0])
             }
-            runTest(t, dir, 0, coverDir, repls.Clone(), config, configPath, expanded[0], inprocessMode)
+            runTest(t, dir, 0, coverDir, repls.Clone(), config, configPath, expanded[0], inprocessMode, filterEnv)
         } else {
             for ind, envset := range expanded {
                 envname := strings.Join(envset, "/")
                 t.Run(envname, func(t *testing.T) {
                     if !inprocessMode {
                         t.Parallel()
                     }
-                    runTest(t, dir, ind, coverDir, repls.Clone(), config, configPath, envset, inprocessMode)
+                    runTest(t, dir, ind, coverDir, repls.Clone(), config, configPath, envset, inprocessMode, filterEnv)
                 })
             }
         }

@@ -354,6 +365,7 @@ func runTest(t *testing.T,
     configPath string,
     customEnv []string,
     inprocessMode bool,
+    filterEnv string,
 ) {
     if LogConfig {
         configBytes, err := json.MarshalIndent(config, "", " ")

@@ -485,6 +497,23 @@
         cmd.Env = addEnvVar(t, cmd.Env, &repls, key, value, config.EnvRepl, len(config.EnvMatrix[key]) > 1)
     }
 
+    if filterEnv != "" {
+        filterEnvKey := strings.Split(filterEnv, "=")[0]
+        for ind := range cmd.Env {
+            // Search backwards, because the latest setting is what is actually applicable.
+            // For cases like DATABRICKS_CLI_DEPLOYMENT, there will be 2 instances: one copied from os.Environ,
+            // the other one coming from EnvMatrix.
+            envPair := cmd.Env[len(cmd.Env)-1-ind]
+            if strings.Split(envPair, "=")[0] == filterEnvKey {
+                if envPair == filterEnv {
+                    break
+                } else {
+                    t.Skipf("Skipping because test environment %s does not match requested env %s", envPair, filterEnv)
+                }
+            }
+        }
+    }
+
     absDir, err := filepath.Abs(dir)
     require.NoError(t, err)
     cmd.Env = append(cmd.Env, "TESTDIR="+absDir)
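
The filter walks cmd.Env from the end because the last occurrence of a variable wins; DATABRICKS_CLI_DEPLOYMENT typically appears twice, once copied from os.Environ() and once injected from the test's EnvMatrix. A simplified Python model of the same skip decision (illustrative, not the harness code):

```python
def should_skip(env: list[str], filter_env: str) -> bool:
    """True if the effective value of the filtered variable differs from the
    requested KEY=VALUE pair; later entries in env take precedence."""
    key = filter_env.split("=", 1)[0]
    for pair in reversed(env):
        if pair.split("=", 1)[0] == key:
            return pair != filter_env  # last occurrence is the effective one
    return False  # variable not set at all: run the test

env = [
    "DATABRICKS_CLI_DEPLOYMENT=",           # cleared copy from os.Environ()
    "DATABRICKS_CLI_DEPLOYMENT=terraform",  # injected from the test's EnvMatrix
]
print(should_skip(env, "DATABRICKS_CLI_DEPLOYMENT=direct"))     # True  -> skip
print(should_skip(env, "DATABRICKS_CLI_DEPLOYMENT=terraform"))  # False -> run
```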

acceptance/bin/read_id.py

Lines changed: 47 additions & 0 deletions

@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+"""
+Print selected attributes from terraform state.
+
+Usage: <section> <name> [attr...]
+"""
+
+import sys
+import os
+import json
+
+
+def print_resource_terraform(section, name):
+    resource_type = "databricks_" + section[:-1]
+    filename = ".databricks/bundle/default/terraform/terraform.tfstate"
+    raw = open(filename).read()
+    data = json.loads(raw)
+    found = 0
+    for r in data["resources"]:
+        r_type = r["type"]
+        r_name = r["name"]
+        if r_type != resource_type:
+            continue
+        if r_name != name:
+            continue
+        for inst in r["instances"]:
+            attribute_values = inst.get("attributes") or {}
+            print(attribute_values.get("id"))
+            return
+
+
+def print_resource_terranova(section, name):
+    filename = ".databricks/bundle/default/resources.json"
+    raw = open(filename).read()
+    data = json.loads(raw)
+    resources = data["resources"].get(section, {})
+    result = resources.get(name)
+    if result is None:
+        print(f"Resource {section=} {name=} not found. Available: {raw}")
+        return
+    print(result.get("__id__"))
+
+
+if os.environ.get("DATABRICKS_CLI_DEPLOYMENT") == "direct":
+    print_resource_terranova(*sys.argv[1:])
+else:
+    print_resource_terraform(*sys.argv[1:])
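
print_resource_terranova implies a rough shape for the direct-deployment state file: resources.json maps a resource section and key to an object carrying __id__ (and, per read_state.py below, a state object). The commit does not show the full schema, so the sample below is only what the two scripts imply; the field values are made up:

```python
# Hypothetical .databricks/bundle/default/resources.json, inferred from the
# lookups the scripts perform (data["resources"][section][name]["__id__"]).
sample = {
    "resources": {
        "jobs": {
            "foo": {
                "__id__": "1234567890",
                "state": {"name": "test-job"},
            }
        }
    }
}

print(sample["resources"]["jobs"]["foo"]["__id__"])  # what read_id.py would print
```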

acceptance/bin/read_state.py

Lines changed: 22 additions & 6 deletions

@@ -13,17 +13,15 @@
 def print_resource_terraform(section, name, *attrs):
     resource_type = "databricks_" + section[:-1]
     filename = ".databricks/bundle/default/terraform/terraform.tfstate"
-    data = json.load(open(filename))
-    available = []
+    raw = open(filename).read()
+    data = json.loads(raw)
     found = 0
     for r in data["resources"]:
         r_type = r["type"]
         r_name = r["name"]
         if r_type != resource_type:
-            available.append((r_type, r_name))
             continue
         if r_name != name:
-            available.append((r_type, r_name))
             continue
         for inst in r["instances"]:
             attribute_values = inst.get("attributes")

@@ -32,7 +30,25 @@ def print_resource_terraform(section, name, *attrs):
                 print(section, name, " ".join(values))
                 found += 1
     if not found:
-        print(f"Resource {(resource_type, name)} not found. Available: {available}")
+        print(f"Resource {section=} {name=} {resource_type=} not found. Available: {raw}")
 
 
-print_resource_terraform(*sys.argv[1:])
+def print_resource_terranova(section, name, *attrs):
+    filename = ".databricks/bundle/default/resources.json"
+    raw = open(filename).read()
+    data = json.loads(raw)
+    resources = data["resources"].get(section, {})
+    result = resources.get(name)
+    if result is None:
+        print(f"Resource {section=} {name=} not found. Available: {raw}")
+        return
+    state = result["state"]
+    state.setdefault("id", result.get("__id__"))
+    values = [f"{x}={state.get(x)!r}" for x in attrs]
+    print(section, name, " ".join(values))
+
+
+if os.environ.get("DATABRICKS_CLI_DEPLOYMENT") == "direct":
+    print_resource_terranova(*sys.argv[1:])
+else:
+    print_resource_terraform(*sys.argv[1:])
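
For the direct deployment, print_resource_terranova reads the recorded state object, falls back to __id__ for the id attribute, and prints attr=value pairs with repr() formatting so its output lines match the terraform code path. A worked example of that formatting (the state entry is made up):

```python
# Hypothetical state entry for resource section "jobs", name "foo".
result = {"__id__": "987", "state": {"name": "test-job"}}
attrs = ["name", "id"]

state = result["state"]
state.setdefault("id", result.get("__id__"))    # id falls back to __id__
values = [f"{x}={state.get(x)!r}" for x in attrs]
print("jobs", "foo", " ".join(values))          # jobs foo name='test-job' id='987'
```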

acceptance/bundle/artifacts/whl_dynamic/test.toml

Lines changed: 3 additions & 0 deletions

@@ -1,3 +1,6 @@
+# Terraform sorts tasks
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
+
 [[Repls]]
 Old = '\\\\'
 New = '/'
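
Pinning EnvMatrix.DATABRICKS_CLI_DEPLOYMENT to ["terraform"] means this test only produces a terraform environment set, so a CI leg that filters for direct skips it via the logic in acceptance_test.go above. A rough model of how such a matrix could expand into per-test environment sets (assumed semantics, mirroring the expanded/envset variables in the harness; the real expansion code is not part of this diff):

```python
from itertools import product

def expand_env_matrix(matrix: dict[str, list[str]]) -> list[list[str]]:
    """Cartesian product of the matrix values as KEY=VALUE lists."""
    keys = sorted(matrix)
    return [[f"{k}={v}" for k, v in zip(keys, combo)]
            for combo in product(*(matrix[k] for k in keys))]

# Both deployment modes exercised.
print(expand_env_matrix({"DATABRICKS_CLI_DEPLOYMENT": ["terraform", "direct"]}))
# Pinned (as in this test.toml): only the terraform variant exists,
# so a CI leg filtering for "direct" has nothing matching and skips.
print(expand_env_matrix({"DATABRICKS_CLI_DEPLOYMENT": ["terraform"]}))
```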

acceptance/bundle/artifacts/whl_prebuilt_outside_dynamic/test.toml

Lines changed: 1 addition & 0 deletions

@@ -1,4 +1,5 @@
 BundleConfig.default_name = ""
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"] # need to sort tasks by key
 
 [[Repls]]
 Old = '\\'

acceptance/bundle/debug/test.toml

Lines changed: 3 additions & 0 deletions

@@ -1,3 +1,6 @@
+# Debug output is naturally different. TODO: split debug tests in two: terraform and terranova
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
+
 [[Repls]]
 # The keys are unsorted and also vary per OS
 Old = 'Environment variables for Terraform: ([A-Z_ ,]+) '

acceptance/bundle/deploy/dashboard/simple/test.toml

Lines changed: 3 additions & 0 deletions

@@ -2,6 +2,9 @@ Local = true
 Cloud = true
 RequiresWarehouse = true
 
+# dashboards not implemented yet
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
+
 Ignore = [
   "databricks.yml",
 ]

acceptance/bundle/deploy/fail-on-active-runs/test.toml

Lines changed: 3 additions & 0 deletions

@@ -1,5 +1,8 @@
 RecordRequests = true
 
+# --fail-on-active-runs not implemented yet
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
+
 [[Server]]
 Pattern = "GET /api/2.2/jobs/runs/list"
 Response.Body = '''

Lines changed: 2 additions & 0 deletions

@@ -1,6 +1,8 @@
 Local = true
 Cloud = true
 
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"] # "bundle destroy"
+
 Ignore = [
   "databricks.yml",
 ]
Lines changed: 2 additions & 0 deletions

@@ -1,6 +1,8 @@
 Local = true
 Cloud = true
 
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
+
 Ignore = [
   "databricks.yml",
 ]

acceptance/bundle/deploy/pipeline/auto-approve/test.toml

Lines changed: 2 additions & 0 deletions

@@ -1,6 +1,8 @@
 Local = true
 Cloud = true
 
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
+
 Ignore = [
   "databricks.yml"
 ]

acceptance/bundle/deploy/pipeline/recreate/test.toml

Lines changed: 2 additions & 0 deletions

@@ -2,6 +2,8 @@ Local = true
 Cloud = true
 RequiresUnityCatalog = true
 
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
+
 Ignore = [
   "databricks.yml"
 ]

acceptance/bundle/deploy/schema/auto-approve/test.toml

Lines changed: 2 additions & 0 deletions

@@ -2,6 +2,8 @@ Local = true
 Cloud = true
 RequiresUnityCatalog = true
 
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
+
 Ignore = [
   "databricks.yml",
   "test-file-*.txt",

acceptance/bundle/deploy/secret-scope/test.toml

Lines changed: 2 additions & 0 deletions

@@ -1,6 +1,8 @@
 Cloud = true
 Local = true
 
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]
+
 Ignore = [
   "databricks.yml",
 ]

Lines changed: 2 additions & 0 deletions

@@ -1,2 +1,4 @@
 BundleConfig.default_name.bundle.name = "test-bundle-$UNIQUE_NAME"
 BundleConfigTarget = "databricks.yml.tmpl"
+
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]

Lines changed: 2 additions & 0 deletions

@@ -0,0 +1,2 @@
+# "bundle summary" is not implemented
+EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["terraform"]

acceptance/bundle/resources/apps/output.txt

Lines changed: 7 additions & 8 deletions

@@ -7,12 +7,12 @@ Deployment complete!
 
 >>> print_requests
 {
-  "method": "POST",
-  "path": "/api/2.0/apps",
   "body": {
     "description": "my_app_description",
     "name": "myapp"
-  }
+  },
+  "method": "POST",
+  "path": "/api/2.0/apps"
 }
 apps myapp name='myapp' description='my_app_description'
 

@@ -27,12 +27,11 @@ Deployment complete!
 
 >>> print_requests
 {
-  "method": "PATCH",
-  "path": "/api/2.0/apps/myapp",
   "body": {
     "description": "MY_APP_DESCRIPTION",
-    "name": "myapp",
-    "url": "myapp-123.cloud.databricksapps.com"
-  }
+    "name": "myapp"
+  },
+  "method": "PATCH",
+  "path": "/api/2.0/apps/myapp"
 }
 apps myapp name='myapp' description='MY_APP_DESCRIPTION'

acceptance/bundle/resources/apps/script

Lines changed: 2 additions & 1 deletion

@@ -1,5 +1,6 @@
 print_requests() {
-  jq 'select(.method != "GET" and (.path | contains("/apps")))' < out.requests.txt
+  # url is an output-only field that terraform adds but that is ignored by the backend
+  jq --sort-keys 'select(.method != "GET" and (.path | contains("/apps"))) | (.body.url = null | del(.body.url))' < out.requests.txt
   rm out.requests.txt
 }
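
The expected output above changed only in key order and in the dropped url field: jq --sort-keys emits object keys alphabetically (so body now precedes method and path), and del(.body.url) hides the output-only field that terraform echoes back but the backend ignores. An equivalent normalization in Python, for reference (the request record below is made up):

```python
import json

request = {
    "method": "PATCH",
    "path": "/api/2.0/apps/myapp",
    "body": {
        "description": "MY_APP_DESCRIPTION",
        "name": "myapp",
        "url": "myapp-123.cloud.databricksapps.com",  # output-only, absent from direct deploys
    },
}

request["body"].pop("url", None)                      # same effect as del(.body.url)
print(json.dumps(request, indent=2, sort_keys=True))  # same effect as jq --sort-keys
```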

Lines changed: 10 additions & 0 deletions

@@ -0,0 +1,10 @@
+bundle:
+  name: acc-$UNIQUE_NAME
+
+resources:
+  pipelines:
+    my:
+      name: test-pipeline-$UNIQUE_NAME
+      libraries:
+        - file:
+            path: "./foo.py"
