package conversion

import (
	"reflect"
	"slices"

	dsschema "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
	"github.com/hashicorp/terraform-plugin-framework/resource/schema"
)

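// DataSourceSchemaFromResource builds a data source schema from the attributes
// of a resource schema. Attributes named in requiredFields become Required;
// every other attribute becomes Computed, and attributes in the internal
// ignore list (currently only "timeouts") are dropped. Resource schemas that
// define Blocks are not supported yet and cause a panic.
//
// Hypothetical usage (resourceSchema() is an assumed helper, not part of this
// package):
//
//	ds := conversion.DataSourceSchemaFromResource(resourceSchema(), "project_id")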
func DataSourceSchemaFromResource(rs schema.Schema, requiredFields ...string) dsschema.Schema {
	ignoreFields := []string{"timeouts"}
	if len(rs.Blocks) > 0 {
		panic("blocks not supported yet")
	}
	ds := dsschema.Schema{
		Attributes: convertAttrs(rs.Attributes, requiredFields, ignoreFields),
	}
	UpdateSchemaDescription(&ds)
	return ds
}

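// UpdateSchemaDescription keeps Description and MarkdownDescription in sync
// for every attribute and block of a resource or data source schema.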
func UpdateSchemaDescription[T schema.Schema | dsschema.Schema](s *T) {
	UpdateAttr(s)
}

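// convertMappings maps a resource schema attribute type name to the
// corresponding data source schema attribute type.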
var convertMappings = map[string]reflect.Type{
	"StringAttribute":       reflect.TypeOf(dsschema.StringAttribute{}),
	"BoolAttribute":         reflect.TypeOf(dsschema.BoolAttribute{}),
	"Int64Attribute":        reflect.TypeOf(dsschema.Int64Attribute{}),
	"Float64Attribute":      reflect.TypeOf(dsschema.Float64Attribute{}),
	"MapAttribute":          reflect.TypeOf(dsschema.MapAttribute{}),
	"SingleNestedAttribute": reflect.TypeOf(dsschema.SingleNestedAttribute{}),
	"ListNestedAttribute":   reflect.TypeOf(dsschema.ListNestedAttribute{}),
	"SetNestedAttribute":    reflect.TypeOf(dsschema.SetNestedAttribute{}),
}

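// convertAttrs converts resource schema attributes into data source schema
// attributes using reflection, copying MarkdownDescription and, where present,
// ElementType, Attributes, and NestedObject. Nested attributes are always
// converted as Computed.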
func convertAttrs(rsAttrs map[string]schema.Attribute, requiredFields, ignoreFields []string) map[string]dsschema.Attribute {
	dsAttrs := make(map[string]dsschema.Attribute, len(rsAttrs))
	for name, attr := range rsAttrs {
		if slices.Contains(ignoreFields, name) {
			continue
		}
		computed := true
		required := false
		if slices.Contains(requiredFields, name) {
			computed = false
			required = true
		}
		vSrc := reflect.ValueOf(attr)
		tSrc := reflect.TypeOf(attr)
		tDst := convertMappings[tSrc.Name()]
		if tDst == nil {
			panic("attribute type not supported yet, add it to convertMappings: " + tSrc.Name())
		}
		vDest := reflect.New(tDst).Elem()
		vDest.FieldByName("MarkdownDescription").Set(vSrc.FieldByName("MarkdownDescription"))
		vDest.FieldByName("Computed").SetBool(computed)
		vDest.FieldByName("Required").SetBool(required)
		// ElementType is in schema.MapAttribute.
		if fElementType := vDest.FieldByName("ElementType"); fElementType.IsValid() && fElementType.CanSet() {
			fElementType.Set(vSrc.FieldByName("ElementType"))
		}
		// Attributes is in schema.SingleNestedAttribute.
		if fAttributes := vDest.FieldByName("Attributes"); fAttributes.IsValid() && fAttributes.CanSet() {
			attrsSrc := vSrc.FieldByName("Attributes").Interface().(map[string]schema.Attribute)
			fAttributes.Set(reflect.ValueOf(convertAttrs(attrsSrc, nil, nil)))
		}
		// NestedObject is in schema.ListNestedAttribute and schema.SetNestedAttribute.
		if fNestedObject := vDest.FieldByName("NestedObject"); fNestedObject.IsValid() && fNestedObject.CanSet() {
			attrsSrc := vSrc.FieldByName("NestedObject").FieldByName("Attributes").Interface().(map[string]schema.Attribute)
			nested := dsschema.NestedAttributeObject{
				Attributes: convertAttrs(attrsSrc, nil, nil),
			}
			fNestedObject.Set(reflect.ValueOf(nested))
		}
		dsAttrs[name] = vDest.Interface().(dsschema.Attribute)
	}
	return dsAttrs
}

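// UpdateAttr copies whichever of Description and MarkdownDescription is set
// into the other field, then recurses into the Attributes and Blocks maps and
// the NestedObject field when they exist.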
// It is exported for testing purposes only and should not be used directly.
func UpdateAttr(attr any) {
	ptr := reflect.ValueOf(attr)
	if ptr.Kind() != reflect.Ptr {
		panic("not ptr, please fix caller")
	}
	v := ptr.Elem()
	if v.Kind() != reflect.Struct {
		panic("not struct, please fix caller")
	}
	updateDesc(v)
	updateMap(v, "Attributes")
	updateMap(v, "Blocks")
	updateNested(v, "NestedObject")
}

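// updateDesc mirrors the non-empty description field into the empty one and
// panics if both Description and MarkdownDescription are set.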
func updateDesc(v reflect.Value) {
	fDescr, fMDDescr := v.FieldByName("Description"), v.FieldByName("MarkdownDescription")
	if !fDescr.IsValid() || !fMDDescr.IsValid() {
		return
	}
	if !fDescr.CanSet() || fDescr.Kind() != reflect.String ||
		!fMDDescr.CanSet() || fMDDescr.Kind() != reflect.String {
		panic("invalid desc fields, please fix caller")
	}
	strDescr, strMDDescr := fDescr.String(), fMDDescr.String()
	if strDescr != "" && strMDDescr != "" {
		panic("both descriptions exist, please fix caller: " + strDescr)
	}
	if strDescr == "" {
		fDescr.SetString(strMDDescr)
	} else {
		fMDDescr.SetString(strDescr)
	}
}

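// updateMap runs UpdateAttr on every value of the named map field (Attributes
// or Blocks). Map values are not addressable, so each one is copied into a new
// pointer, updated, and written back.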
func updateMap(v reflect.Value, mapName string) {
	f := v.FieldByName(mapName)
	if !f.IsValid() {
		return
	}
	if f.Kind() != reflect.Map {
		panic("not map, please fix caller: " + mapName)
	}
	for _, k := range f.MapKeys() {
		elem := f.MapIndex(k).Elem()
		newPtr := reflect.New(elem.Type())
		newPtr.Elem().Set(elem)
		UpdateAttr(newPtr.Interface())
		f.SetMapIndex(k, newPtr.Elem())
	}
}

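// updateNested runs UpdateAttr on the named struct field (NestedObject) when
// it exists.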
func updateNested(v reflect.Value, nestedName string) {
	f := v.FieldByName(nestedName)
	if !f.IsValid() {
		return
	}
	ptr := f.Addr()
	UpdateAttr(ptr.Interface())
}