github.com/databricks/cli@v0.203.0/bundle/deploy/terraform/convert.go (about) 1 package terraform 2 3 import ( 4 "encoding/json" 5 "fmt" 6 7 "github.com/databricks/cli/bundle/config" 8 "github.com/databricks/cli/bundle/config/resources" 9 "github.com/databricks/cli/bundle/internal/tf/schema" 10 tfjson "github.com/hashicorp/terraform-json" 11 ) 12 13 func conv(from any, to any) { 14 buf, _ := json.Marshal(from) 15 json.Unmarshal(buf, &to) 16 } 17 18 func convPermissions(acl []resources.Permission) *schema.ResourcePermissions { 19 if len(acl) == 0 { 20 return nil 21 } 22 23 resource := schema.ResourcePermissions{} 24 for _, ac := range acl { 25 resource.AccessControl = append(resource.AccessControl, convPermission(ac)) 26 } 27 28 return &resource 29 } 30 31 func convPermission(ac resources.Permission) schema.ResourcePermissionsAccessControl { 32 dst := schema.ResourcePermissionsAccessControl{ 33 PermissionLevel: ac.Level, 34 } 35 if ac.UserName != "" { 36 dst.UserName = ac.UserName 37 } 38 if ac.GroupName != "" { 39 dst.GroupName = ac.GroupName 40 } 41 if ac.ServicePrincipalName != "" { 42 dst.ServicePrincipalName = ac.ServicePrincipalName 43 } 44 return dst 45 } 46 47 // BundleToTerraform converts resources in a bundle configuration 48 // to the equivalent Terraform JSON representation. 49 // 50 // NOTE: THIS IS CURRENTLY A HACK. WE NEED A BETTER WAY TO 51 // CONVERT TO/FROM TERRAFORM COMPATIBLE FORMAT. 
// BundleToTerraform builds a Terraform root module from the bundle's
// resources: jobs, pipelines, MLflow models, and MLflow experiments,
// each with an accompanying databricks_permissions resource when the
// bundle declares permissions for it.
//
// NOTE(review): assumes schema.NewResources() initializes the Job /
// Pipeline / MlflowModel / MlflowExperiment / Permissions maps — the
// indexed assignments below would panic on nil maps. Confirm in schema.
func BundleToTerraform(config *config.Root) *schema.Root {
	tfroot := schema.NewRoot()
	tfroot.Provider = schema.NewProviders()
	tfroot.Resource = schema.NewResources()

	for k, src := range config.Resources.Jobs {
		var dst schema.ResourceJob
		conv(src, &dst)

		if src.JobSettings != nil {
			// Tasks, task libraries, and job clusters are repeated blocks
			// in the Terraform schema; convert them element by element.
			for _, v := range src.Tasks {
				var t schema.ResourceJobTask
				conv(v, &t)

				for _, v_ := range v.Libraries {
					var l schema.ResourceJobTaskLibrary
					conv(v_, &l)
					t.Library = append(t.Library, l)
				}

				dst.Task = append(dst.Task, t)
			}

			for _, v := range src.JobClusters {
				var t schema.ResourceJobJobCluster
				conv(v, &t)
				dst.JobCluster = append(dst.JobCluster, t)
			}

			// Git source fields are mapped by hand because the bundle and
			// Terraform schemas use different field names.
			// Unblock downstream work. To be addressed more generally later.
			if git := src.GitSource; git != nil {
				dst.GitSource = &schema.ResourceJobGitSource{
					Url:      git.GitUrl,
					Branch:   git.GitBranch,
					Commit:   git.GitCommit,
					Provider: string(git.GitProvider),
					Tag:      git.GitTag,
				}
			}
		}

		tfroot.Resource.Job[k] = &dst

		// Configure permissions for this resource. The id is a Terraform
		// interpolation referencing the job resource emitted above.
		if rp := convPermissions(src.Permissions); rp != nil {
			rp.JobId = fmt.Sprintf("${databricks_job.%s.id}", k)
			tfroot.Resource.Permissions["job_"+k] = rp
		}
	}

	for k, src := range config.Resources.Pipelines {
		var dst schema.ResourcePipeline
		conv(src, &dst)

		if src.PipelineSpec != nil {
			// Libraries and clusters are repeated blocks; convert one by one.
			for _, v := range src.Libraries {
				var l schema.ResourcePipelineLibrary
				conv(v, &l)
				dst.Library = append(dst.Library, l)
			}

			for _, v := range src.Clusters {
				var l schema.ResourcePipelineCluster
				conv(v, &l)
				dst.Cluster = append(dst.Cluster, l)
			}
		}

		tfroot.Resource.Pipeline[k] = &dst

		// Configure permissions for this resource.
		if rp := convPermissions(src.Permissions); rp != nil {
			rp.PipelineId = fmt.Sprintf("${databricks_pipeline.%s.id}", k)
			tfroot.Resource.Permissions["pipeline_"+k] = rp
		}
	}

	for k, src := range config.Resources.Models {
		var dst schema.ResourceMlflowModel
		conv(src, &dst)
		tfroot.Resource.MlflowModel[k] = &dst

		// Configure permissions for this resource. Note: model permissions
		// key off registered_model_id, not the plain resource id.
		if rp := convPermissions(src.Permissions); rp != nil {
			rp.RegisteredModelId = fmt.Sprintf("${databricks_mlflow_model.%s.registered_model_id}", k)
			tfroot.Resource.Permissions["mlflow_model_"+k] = rp
		}
	}

	for k, src := range config.Resources.Experiments {
		var dst schema.ResourceMlflowExperiment
		conv(src, &dst)
		tfroot.Resource.MlflowExperiment[k] = &dst

		// Configure permissions for this resource.
		if rp := convPermissions(src.Permissions); rp != nil {
			rp.ExperimentId = fmt.Sprintf("${databricks_mlflow_experiment.%s.id}", k)
			tfroot.Resource.Permissions["mlflow_experiment_"+k] = rp
		}
	}

	return tfroot
}

// TerraformToBundle merges attribute values from Terraform state back
// into the bundle configuration. For every managed resource in the root
// module it converts the state attributes through the Terraform schema
// type and layers them on top of the existing bundle resource with the
// same name. Returns an error for resource types it has no mapping for.
//
// NOTE(review): assumes state.Values and state.Values.RootModule are
// non-nil — an empty state would panic here. Confirm callers only pass
// a populated state.
func TerraformToBundle(state *tfjson.State, config *config.Root) error {
	for _, resource := range state.Values.RootModule.Resources {
		// Limit to resources (skip data sources).
		if resource.Mode != tfjson.ManagedResourceMode {
			continue
		}

		// Each case: state attributes -> schema type -> existing bundle
		// resource, so only fields present in the schema type are merged.
		switch resource.Type {
		case "databricks_job":
			var tmp schema.ResourceJob
			conv(resource.AttributeValues, &tmp)
			cur := config.Resources.Jobs[resource.Name]
			conv(tmp, &cur)
			config.Resources.Jobs[resource.Name] = cur
		case "databricks_pipeline":
			var tmp schema.ResourcePipeline
			conv(resource.AttributeValues, &tmp)
			cur := config.Resources.Pipelines[resource.Name]
			conv(tmp, &cur)
			config.Resources.Pipelines[resource.Name] = cur
		case "databricks_mlflow_model":
			var tmp schema.ResourceMlflowModel
			conv(resource.AttributeValues, &tmp)
			cur := config.Resources.Models[resource.Name]
			conv(tmp, &cur)
			config.Resources.Models[resource.Name] = cur
		case "databricks_mlflow_experiment":
			var tmp schema.ResourceMlflowExperiment
			conv(resource.AttributeValues, &tmp)
			cur := config.Resources.Experiments[resource.Name]
			conv(tmp, &cur)
			config.Resources.Experiments[resource.Name] = cur
		case "databricks_permissions":
			// Ignore; no need to pull these back into the configuration.
		default:
			return fmt.Errorf("missing mapping for %s", resource.Type)
		}
	}

	return nil
}