diff --git a/README.md b/README.md index 96b766c..1cbf428 100644 --- a/README.md +++ b/README.md @@ -74,8 +74,7 @@ module "databricks_locations" { | Name | Version | |------|---------| -| [terraform](#requirement\_terraform) | >=1.0.0 | -| [azurerm](#requirement\_azurerm) | >= 4.0.1 | +| [terraform](#requirement\_terraform) | ~>1.3 | | [databricks](#requirement\_databricks) | ~>1.0 | ## Providers @@ -101,8 +100,9 @@ No modules. | Name | Description | Type | Default | Required | |------|-------------|------|---------|:--------:| -| [external\_locations](#input\_external\_locations) | List of object with external location configuration attributes |
list(object({| `[]` | no | -| [storage\_credential](#input\_storage\_credential) | Object with storage credentials configuration attributes |
index = string # Index of instance, for example short name, used later to access exact external location in output map
name = string # Custom whole name of resource
url = string # Path URL in cloud storage
credentials_name = optional(string)
owner = optional(string) # Owner of resource
skip_validation = optional(bool, true) # Suppress validation errors if any & force save the external location
read_only = optional(bool, false) # Indicates whether the external location is read-only.
force_destroy = optional(bool, true)
force_update = optional(bool, true)
comment = optional(string, "External location provisioned by Terraform")
permissions = optional(set(object({
principal = string
privileges = list(string)
})), [])
isolation_mode = optional(string, "ISOLATION_MODE_OPEN")
}))
object({| n/a | yes | +| [cloud](#input\_cloud) | Cloud (azure, aws or gcp) | `string` | n/a | yes | +| [external\_locations](#input\_external\_locations) | List of object with external location configuration attributes |
azure_access_connector_id = optional(string, null) # Azure Databricks Access Connector Id
cloud = optional(string, "")
name = optional(string, null) # Custom whole name of resource
owner = optional(string) # Owner of resource
force_destroy = optional(bool, true)
comment = optional(string, "Managed identity credential provisioned by Terraform")
permissions = optional(set(object({
principal = string
privileges = list(string)
})), [])
isolation_mode = optional(string, "ISOLATION_MODE_OPEN")
})
list(object({| `[]` | no | +| [storage\_credential](#input\_storage\_credential) | Object with storage credentials configuration attributes |
index = string # Index of the instance (for example, a short name), used later to access the exact external location in the output map
name = string # Custom whole name of resource
url = string # Path URL in cloud storage
credentials_name = optional(string) # If storage_credential.create_storage_credential is set to false, provide the name of an existing storage credential here
owner = optional(string) # Owner of resource
skip_validation = optional(bool, true) # Suppress validation errors if any & force save the external location
read_only = optional(bool, false) # Indicates whether the external location is read-only.
force_destroy = optional(bool, true)
force_update = optional(bool, true)
comment = optional(string, "External location provisioned by Terraform")
permissions = optional(set(object({
principal = string
privileges = list(string)
})), [])
isolation_mode = optional(string, "ISOLATION_MODE_OPEN")
}))
object({| n/a | yes | ## Outputs diff --git a/main.tf b/main.tf index ebe0b5c..c1301c7 100644 --- a/main.tf +++ b/main.tf @@ -13,14 +13,14 @@ locals { } resource "databricks_storage_credential" "this" { - count = var.storage_credential.cloud != "" ? 1 : 0 + count = var.storage_credential.create_storage_credential ? 1 : 0 name = var.storage_credential.name owner = var.storage_credential.owner # Dynamic block for Azure dynamic "azure_managed_identity" { - for_each = var.storage_credential.cloud == "azure" ? [1] : [] + for_each = var.cloud == "azure" ? [1] : [] content { access_connector_id = var.storage_credential.azure_access_connector_id } @@ -28,17 +28,17 @@ resource "databricks_storage_credential" "this" { # Dynamic block for GCP dynamic "databricks_gcp_service_account" { - for_each = var.storage_credential.cloud == "gcp" ? [1] : [] + for_each = var.cloud == "gcp" ? [1] : [] content {} } force_destroy = var.storage_credential.force_destroy comment = var.storage_credential.comment - isolation_mode = var.storage_credential.cloud == "azure" ? var.storage_credential.isolation_mode : null + isolation_mode = var.cloud == "azure" ? var.storage_credential.isolation_mode : null } resource "databricks_grants" "credential" { - count = var.storage_credential.cloud != "" ? 1 : 0 + count = var.storage_credential.create_storage_credential ? (length(var.storage_credential.permissions) != 0 ? 
1 : 0) : 0 storage_credential = try(databricks_storage_credential.this[0].id, null) dynamic "grant" { diff --git a/variables.tf b/variables.tf index b0ad60b..858a24d 100644 --- a/variables.tf +++ b/variables.tf @@ -1,11 +1,11 @@ variable "storage_credential" { type = object({ azure_access_connector_id = optional(string, null) # Azure Databricks Access Connector Id - cloud = optional(string, "") - name = optional(string, null) # Custom whole name of resource + name = optional(string, null) # Custom whole name of resource owner = optional(string) # Owner of resource force_destroy = optional(bool, true) comment = optional(string, "Managed identity credential provisioned by Terraform") + create_storage_credential = optional(bool, true) # "Boolean flag that determines whether to create storage credential or use the existing one" permissions = optional(set(object({ principal = string privileges = list(string) @@ -15,12 +15,17 @@ variable "storage_credential" { description = "Object with storage credentials configuration attributes" } +variable "cloud" { + type = string + description = "Cloud (azure, aws or gcp)" +} + variable "external_locations" { type = list(object({ - index = string # Index of instance, for example short name, used later to access exact external location in output map - name = string # Custom whole name of resource - url = string # Path URL in cloud storage - credentials_name = optional(string) + index = string # Index of instance, for example short name, used later to access exact external location in output map + name = string # Custom whole name of resource + url = string # Path URL in cloud storage + credentials_name = optional(string) # If storage_credential.create_storage_credential is set to false, provide id of existing storage credential here owner = optional(string) # Owner of resource skip_validation = optional(bool, true) # Suppress validation errors if any & force save the external location read_only = optional(bool, false) # Indicates 
whether the external location is read-only. diff --git a/versions.tf b/versions.tf index 6f593ff..ffc6c84 100644 --- a/versions.tf +++ b/versions.tf @@ -1,11 +1,7 @@ terraform { - required_version = ">=1.0.0" + required_version = "~>1.3" required_providers { - azurerm = { - source = "hashicorp/azurerm" - version = ">= 4.0.1" - } databricks = { source = "databricks/databricks" version = "~>1.0"
azure_access_connector_id = optional(string, null) # Azure Databricks Access Connector Id
name = optional(string, null) # Custom whole name of resource
owner = optional(string) # Owner of resource
force_destroy = optional(bool, true)
comment = optional(string, "Managed identity credential provisioned by Terraform")
create_storage_credential = optional(bool, true) # Boolean flag that determines whether to create the storage credential or use an existing one
permissions = optional(set(object({
principal = string
privileges = list(string)
})), [])
isolation_mode = optional(string, "ISOLATION_MODE_OPEN")
})