Commit 261a3a4

chore: Auto-generates singular data source schemas in TPF resources (#2799)
* DataSourceSchemaFromResource
* auto-generate mongodbemployeeaccessgrant data source schema
* TestDataSourceSchemaFromResource
* extract encryptionatrest resource schema
* fix linter
* Revert "fix linter" This reverts commit a2f7048.
* Revert "extract encryptionatrest resource schema" This reverts commit f0da497.
* TestDataSourceSchemasTemporary
* pushbasedlog
* ignore timeouts
* description update in search_deployment so it's the same for resource and data source schema
* search_deployment
* fail if unused required fields are passed
* fixes search_deployment
* test for ListNestedAttribute
* adapt doc
* refactor DataSourceSchemaFromResource
* typo
* use attrTyped.MarkdownDescription
* update adv cluster tpf
* add panic info
* test for all cases
* use ignore and required fields only in first level
* reflect to improve convertAttrs
* removed old data source schemas
* remove temporary tests
* simplify MarkdownDescription set
* rename files to schema_generation
1 parent 2a8e2ab commit 261a3a4
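
As a rough illustration of the pattern this commit introduces (not code taken from the diff), a TPF data source can delegate its schema to the resource schema through the new helper. The package name, data source type, resourceSchema helper, and attribute set below are assumptions for the sketch; only conversion.DataSourceSchemaFromResource and its requiredFields semantics come from the commit.

package searchdeployment

import (
    "context"

    "github.com/hashicorp/terraform-plugin-framework/datasource"
    "github.com/hashicorp/terraform-plugin-framework/resource/schema"
    "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
)

// resourceSchema stands in for the resource's schema function (hypothetical).
func resourceSchema(_ context.Context) schema.Schema {
    return schema.Schema{
        Attributes: map[string]schema.Attribute{
            "project_id":   schema.StringAttribute{Required: true, MarkdownDescription: "Unique 24-hexadecimal digit string that identifies your project."},
            "cluster_name": schema.StringAttribute{Required: true, MarkdownDescription: "Label that identifies the cluster."},
            "state_name":   schema.StringAttribute{Computed: true, MarkdownDescription: "Current state of the resource."},
        },
    }
}

type searchDeploymentDS struct{}

// Schema reuses the resource schema: the fields passed as requiredFields stay
// Required in the generated data source schema, everything else becomes Computed.
func (d *searchDeploymentDS) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
    resp.Schema = conversion.DataSourceSchemaFromResource(resourceSchema(ctx), "project_id", "cluster_name")
}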

File tree

14 files changed, +486 -382 lines changed

docs/resources/search_deployment.md

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@ output "mongodbatlas_search_deployment_id" {
 ### Required

 - `cluster_name` (String) Label that identifies the cluster to return the search nodes for.
-- `project_id` (String) Unique 24-hexadecimal character string that identifies the project.
+- `project_id` (String) Unique 24-hexadecimal digit string that identifies your project.
 - `specs` (Attributes List) List of settings that configure the search nodes for your cluster. This list is currently limited to defining a single element. (see [below for nested schema](#nestedatt--specs))

 ### Optional

internal/common/conversion/schema_description.go

Lines changed: 0 additions & 74 deletions
This file was deleted.

internal/common/conversion/schema_description_test.go

Lines changed: 0 additions & 166 deletions
This file was deleted.
internal/common/conversion/schema_generation.go

Lines changed: 142 additions & 0 deletions
@@ -0,0 +1,142 @@
package conversion

import (
    "reflect"
    "slices"

    dsschema "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
    "github.com/hashicorp/terraform-plugin-framework/resource/schema"
)

func DataSourceSchemaFromResource(rs schema.Schema, requiredFields ...string) dsschema.Schema {
    ignoreFields := []string{"timeouts"}
    if len(rs.Blocks) > 0 {
        panic("blocks not supported yet")
    }
    ds := dsschema.Schema{
        Attributes: convertAttrs(rs.Attributes, requiredFields, ignoreFields),
    }
    UpdateSchemaDescription(&ds)
    return ds
}

func UpdateSchemaDescription[T schema.Schema | dsschema.Schema](s *T) {
    UpdateAttr(s)
}

var convertMappings = map[string]reflect.Type{
    "StringAttribute":       reflect.TypeOf(dsschema.StringAttribute{}),
    "BoolAttribute":         reflect.TypeOf(dsschema.BoolAttribute{}),
    "Int64Attribute":        reflect.TypeOf(dsschema.Int64Attribute{}),
    "Float64Attribute":      reflect.TypeOf(dsschema.Float64Attribute{}),
    "MapAttribute":          reflect.TypeOf(dsschema.MapAttribute{}),
    "SingleNestedAttribute": reflect.TypeOf(dsschema.SingleNestedAttribute{}),
    "ListNestedAttribute":   reflect.TypeOf(dsschema.ListNestedAttribute{}),
    "SetNestedAttribute":    reflect.TypeOf(dsschema.SetNestedAttribute{}),
}

func convertAttrs(rsAttrs map[string]schema.Attribute, requiredFields, ignoreFields []string) map[string]dsschema.Attribute {
    dsAttrs := make(map[string]dsschema.Attribute, len(rsAttrs))
    for name, attr := range rsAttrs {
        if slices.Contains(ignoreFields, name) {
            continue
        }
        computed := true
        required := false
        if slices.Contains(requiredFields, name) {
            computed = false
            required = true
        }
        vSrc := reflect.ValueOf(attr)
        tSrc := reflect.TypeOf(attr)
        tDst := convertMappings[tSrc.Name()]
        if tDst == nil {
            panic("attribute type not support yet, add it to convertMappings: " + tSrc.Name())
        }
        vDest := reflect.New(tDst).Elem()
        vDest.FieldByName("MarkdownDescription").Set(vSrc.FieldByName("MarkdownDescription"))
        vDest.FieldByName("Computed").SetBool(computed)
        vDest.FieldByName("Required").SetBool(required)
        // ElementType is in schema.MapAttribute
        if fElementType := vDest.FieldByName("ElementType"); fElementType.IsValid() && fElementType.CanSet() {
            fElementType.Set(vSrc.FieldByName("ElementType"))
        }
        // Attributes is in schema.SingleNestedAttribute
        if fAttributes := vDest.FieldByName("Attributes"); fAttributes.IsValid() && fAttributes.CanSet() {
            attrsSrc := vSrc.FieldByName("Attributes").Interface().(map[string]schema.Attribute)
            fAttributes.Set(reflect.ValueOf(convertAttrs(attrsSrc, nil, nil)))
        }
        // NestedObject is in schema.ListNestedAttribute and schema.SetNestedAttribute
        if fNestedObject := vDest.FieldByName("NestedObject"); fNestedObject.IsValid() && fNestedObject.CanSet() {
            attrsSrc := vSrc.FieldByName("NestedObject").FieldByName("Attributes").Interface().(map[string]schema.Attribute)
            nested := dsschema.NestedAttributeObject{
                Attributes: convertAttrs(attrsSrc, nil, nil),
            }
            fNestedObject.Set(reflect.ValueOf(nested))
        }
        dsAttrs[name] = vDest.Interface().(dsschema.Attribute)
    }
    return dsAttrs
}

// UpdateAttr is exported for testing purposes only and should not be used directly.
func UpdateAttr(attr any) {
    ptr := reflect.ValueOf(attr)
    if ptr.Kind() != reflect.Ptr {
        panic("not ptr, please fix caller")
    }
    v := ptr.Elem()
    if v.Kind() != reflect.Struct {
        panic("not struct, please fix caller")
    }
    updateDesc(v)
    updateMap(v, "Attributes")
    updateMap(v, "Blocks")
    updateNested(v, "NestedObject")
}

func updateDesc(v reflect.Value) {
    fDescr, fMDDescr := v.FieldByName("Description"), v.FieldByName("MarkdownDescription")
    if !fDescr.IsValid() || !fMDDescr.IsValid() {
        return
    }
    if !fDescr.CanSet() || fDescr.Kind() != reflect.String ||
        !fMDDescr.CanSet() || fMDDescr.Kind() != reflect.String {
        panic("invalid desc fields, please fix caller")
    }
    strDescr, strMDDescr := fDescr.String(), fMDDescr.String()
    if strDescr != "" && strMDDescr != "" {
        panic("both descriptions exist, please fix caller: " + strDescr)
    }
    if strDescr == "" {
        fDescr.SetString(fMDDescr.String())
    } else {
        fMDDescr.SetString(fDescr.String())
    }
}

func updateMap(v reflect.Value, mapName string) {
    f := v.FieldByName(mapName)
    if !f.IsValid() {
        return
    }
    if f.Kind() != reflect.Map {
        panic("not map, please fix caller: " + mapName)
    }
    for _, k := range f.MapKeys() {
        v := f.MapIndex(k).Elem()
        newPtr := reflect.New(v.Type())
        newPtr.Elem().Set(v)
        UpdateAttr(newPtr.Interface())
        f.SetMapIndex(k, newPtr.Elem())
    }
}

func updateNested(v reflect.Value, nestedName string) {
    f := v.FieldByName(nestedName)
    if !f.IsValid() {
        return
    }
    ptr := f.Addr()
    UpdateAttr(ptr.Interface())
}
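
The conversion semantics can be exercised with a small test. The sketch below is not the commit's TestDataSourceSchemaFromResource; the attribute names, descriptions, and assertions are made up for illustration, but they rely only on behavior visible in the code above: fields listed in requiredFields stay Required, everything else becomes Computed, and UpdateSchemaDescription keeps Description and MarkdownDescription in sync.

package conversion_test

import (
    "testing"

    "github.com/hashicorp/terraform-plugin-framework/resource/schema"
    "github.com/hashicorp/terraform-plugin-framework/types"
    "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
)

func TestDataSourceSchemaFromResourceSketch(t *testing.T) {
    rs := schema.Schema{
        Attributes: map[string]schema.Attribute{
            "project_id": schema.StringAttribute{Required: true, MarkdownDescription: "Project identifier."},
            "labels":     schema.MapAttribute{ElementType: types.StringType, Optional: true, MarkdownDescription: "Labels."},
        },
    }
    ds := conversion.DataSourceSchemaFromResource(rs, "project_id")

    // "project_id" was passed as a required field, so it stays Required.
    if !ds.Attributes["project_id"].IsRequired() {
        t.Error("expected project_id to be required")
    }
    // Every other attribute becomes Computed, with synced descriptions.
    labels := ds.Attributes["labels"]
    if !labels.IsComputed() || labels.GetDescription() != labels.GetMarkdownDescription() {
        t.Error("expected labels to be computed with synced descriptions")
    }
}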
