Skip to content

Commit ecb6307

Browse files
authored
testserver: Fix pipelines update to set default for storage (#2960)
## Why This is the default that is set by the backend if the field is missing in a Create/Update request. We already have Create covered; this change applies the same default to Update. Discovered while working on the Terraform removal (#2926): Terraform reads the pipeline from the backend and then adds those defaults to future update requests, while the new deployment backend does not read anything from the backend, so those fields remain empty in update requests. The end result (as measured by `pipelines get $id`) is, however, the same.
1 parent d9ec150 commit ecb6307

File tree

1 file changed

+15
-10
lines changed

1 file changed

+15
-10
lines changed

libs/testserver/pipelines.go

Lines changed: 15 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -40,20 +40,14 @@ func (s *FakeWorkspace) PipelineCreate(req Request) Response {
4040
r.Spec = &spec
4141

4242
pipelineId := uuid.New().String()
43-
spec.Id = pipelineId
4443
r.PipelineId = pipelineId
4544
r.CreatorUserName = "tester@databricks.com"
4645
r.LastModified = time.Now().UnixMilli()
4746
r.Name = r.Spec.Name
4847
r.RunAsUserName = "tester@databricks.com"
4948
r.State = "IDLE"
5049

51-
// If the pipeline definition does not specify a catalog, it switches to Hive metastore mode
52-
// and if the storage location is not specified, API automatically generates a storage location
53-
// (ref: https://docs.databricks.com/gcp/en/dlt/hive-metastore#specify-a-storage-location)
54-
if spec.Storage == "" && spec.Catalog == "" {
55-
spec.Storage = "dbfs:/pipelines/" + pipelineId
56-
}
50+
setSpecDefaults(&spec, pipelineId)
5751
s.Pipelines[pipelineId] = r
5852

5953
return Response{
@@ -63,11 +57,21 @@ func (s *FakeWorkspace) PipelineCreate(req Request) Response {
6357
}
6458
}
6559

60+
func setSpecDefaults(spec *pipelines.PipelineSpec, pipelineId string) {
61+
spec.Id = pipelineId
62+
// If the pipeline definition does not specify a catalog, it switches to Hive metastore mode
63+
// and if the storage location is not specified, API automatically generates a storage location
64+
// (ref: https://docs.databricks.com/gcp/en/dlt/hive-metastore#specify-a-storage-location)
65+
if spec.Storage == "" && spec.Catalog == "" {
66+
spec.Storage = "dbfs:/pipelines/" + pipelineId
67+
}
68+
}
69+
6670
func (s *FakeWorkspace) PipelineUpdate(req Request, pipelineId string) Response {
6771
defer s.LockUnlock()()
6872

69-
var request pipelines.PipelineSpec
70-
err := json.Unmarshal(req.Body, &request)
73+
var spec pipelines.PipelineSpec
74+
err := json.Unmarshal(req.Body, &spec)
7175
if err != nil {
7276
return Response{
7377
Body: fmt.Sprintf("internal error: %s", err),
@@ -82,7 +86,8 @@ func (s *FakeWorkspace) PipelineUpdate(req Request, pipelineId string) Response
8286
}
8387
}
8488

85-
item.Spec = &request
89+
item.Spec = &spec
90+
setSpecDefaults(&spec, pipelineId)
8691
s.Pipelines[pipelineId] = item
8792

8893
return Response{

0 commit comments

Comments
 (0)