github.com/hashicorp/packer@v1.14.3/hcl2template/parser.go (about)

     1  // Copyright (c) HashiCorp, Inc.
     2  // SPDX-License-Identifier: BUSL-1.1
     3  
     4  package hcl2template
     5  
     6  import (
     7  	"fmt"
     8  	"os"
     9  	"path/filepath"
    10  	"reflect"
    11  
    12  	"github.com/hashicorp/go-multierror"
    13  	"github.com/hashicorp/go-version"
    14  	"github.com/hashicorp/hcl/v2"
    15  	"github.com/hashicorp/hcl/v2/ext/dynblock"
    16  	"github.com/hashicorp/hcl/v2/hclparse"
    17  	packersdk "github.com/hashicorp/packer-plugin-sdk/packer"
    18  	"github.com/hashicorp/packer/internal/dag"
    19  	"github.com/hashicorp/packer/packer"
    20  	"github.com/zclconf/go-cty/cty"
    21  )
    22  
// Top-level block type names recognized in a Packer HCL2 template. These are
// the keys used in configSchema below and in the parsing switch statements.
const (
	packerLabel            = "packer"
	sourceLabel            = "source"
	variablesLabel         = "variables"
	variableLabel          = "variable"
	localsLabel            = "locals"
	localLabel             = "local"
	dataSourceLabel        = "data"
	buildLabel             = "build"
	hcpPackerRegistryLabel = "hcp_packer_registry"
	communicatorLabel      = "communicator"
)
    35  
// configSchema lists every block type allowed at the top level of a Packer
// HCL2 configuration file, with the labels each block type expects. Checking
// a file's content against this schema surfaces unsupported block types and
// malformed labels as diagnostics.
var configSchema = &hcl.BodySchema{
	Blocks: []hcl.BlockHeaderSchema{
		{Type: packerLabel},
		{Type: sourceLabel, LabelNames: []string{"type", "name"}},
		{Type: variablesLabel},
		{Type: variableLabel, LabelNames: []string{"name"}},
		{Type: localsLabel},
		{Type: localLabel, LabelNames: []string{"name"}},
		{Type: dataSourceLabel, LabelNames: []string{"type", "name"}},
		{Type: buildLabel},
		{Type: hcpPackerRegistryLabel},
		{Type: communicatorLabel, LabelNames: []string{"type", "name"}},
	},
}
    50  
// packerBlockSchema is the schema for a top-level "packer" block in
// a configuration file. It carries the core version constraint
// (required_version) and the required_plugins sub-block.
var packerBlockSchema = &hcl.BodySchema{
	Attributes: []hcl.AttributeSchema{
		{Name: "required_version"},
	},
	Blocks: []hcl.BlockHeaderSchema{
		{Type: "required_plugins"},
	},
}
    61  
// Parser helps you parse HCL folders. It will parse an hcl file or directory
// and start builders, provisioners and post-processors to configure them with
// the parsed HCL and then return a []packersdk.Build. Packer will use that list
// of Builds to run everything in order.
type Parser struct {
	// CorePackerVersion is the running Packer core version; Parse checks it
	// against the required_version constraints found in the parsed files.
	CorePackerVersion *version.Version

	// CorePackerVersionString is copied verbatim into the PackerConfig
	// produced by Parse.
	CorePackerVersionString string

	// PluginConfig describes the plugin components available to this run.
	PluginConfig *packer.PluginConfig

	// ValidationOptions are forwarded to the PackerConfig produced by Parse.
	ValidationOptions

	// The embedded hclparse.Parser provides ParseHCLFile and ParseJSONFile.
	*hclparse.Parser
}
    77  
// File extensions recognized by Packer: regular config files (.pkr.*),
// variable-definition files (.pkrvars.*), and variable files that are
// picked up automatically without being named on the command line
// (.auto.pkrvars.*).
const (
	hcl2FileExt            = ".pkr.hcl"
	hcl2JsonFileExt        = ".pkr.json"
	hcl2VarFileExt         = ".pkrvars.hcl"
	hcl2VarJsonFileExt     = ".pkrvars.json"
	hcl2AutoVarFileExt     = ".auto.pkrvars.hcl"
	hcl2AutoVarJsonFileExt = ".auto.pkrvars.json"
)
    86  
// Parse will Parse all HCL files in filename. Path can be a folder or a file.
//
// Parse will first Parse packer and variables blocks, omitting the rest, which
// can be expanded with dynamic blocks. We need to evaluate all variables for
// that, so that data sources can expand dynamic blocks too.
//
// Parse returns a PackerConfig that contains configuration layout of a packer
// build; sources(builders)/provisioners/posts-processors will not be started
// and their contents won't be verified; Most syntax errors will cause an error,
// init should be called next to expand dynamic blocks and verify that used
// things do exist.
func (p *Parser) Parse(filename string, varFiles []string, argVars map[string]string) (*PackerConfig, hcl.Diagnostics) {
	var files []*hcl.File
	var diags hcl.Diagnostics

	// parse config files
	if filename != "" {
		hclFiles, jsonFiles, moreDiags := GetHCL2Files(filename, hcl2FileExt, hcl2JsonFileExt)
		diags = append(diags, moreDiags...)
		if moreDiags.HasErrors() {
			// here this probably means that the file was not found, let's
			// simply leave early.
			return nil, diags
		}
		if len(hclFiles)+len(jsonFiles) == 0 {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Could not find any config file in " + filename,
				Detail: "A config file must be suffixed with `.pkr.hcl` or " +
					"`.pkr.json`. A folder can be referenced.",
			})
		}
		for _, filename := range hclFiles {
			f, moreDiags := p.ParseHCLFile(filename)
			diags = append(diags, moreDiags...)
			files = append(files, f)
		}
		for _, filename := range jsonFiles {
			f, moreDiags := p.ParseJSONFile(filename)
			diags = append(diags, moreDiags...)
			files = append(files, f)
		}
		// Syntax errors make any further analysis meaningless, stop here.
		if diags.HasErrors() {
			return nil, diags
		}
	}

	// Basedir is the directory relative paths are resolved against; when
	// filename points at a file rather than a folder, use its parent.
	basedir := filename
	if isDir, err := isDir(basedir); err == nil && !isDir {
		basedir = filepath.Dir(basedir)
	}
	wd, err := os.Getwd()
	if err != nil {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Could not find current working directory",
			Detail:   err.Error(),
		})
	}
	cfg := &PackerConfig{
		Basedir:                 basedir,
		Cwd:                     wd,
		CorePackerVersionString: p.CorePackerVersionString,
		HCPVars:                 map[string]cty.Value{},
		ValidationOptions:       p.ValidationOptions,
		parser:                  p,
		files:                   files,
	}

	// Collect required_version constraints from every file using a minimal
	// schema, so a version mismatch can be reported even when the rest of the
	// file uses constructs this Packer version does not understand.
	for _, file := range files {
		coreVersionConstraints, moreDiags := sniffCoreVersionRequirements(file.Body)
		cfg.Packer.VersionConstraints = append(cfg.Packer.VersionConstraints, coreVersionConstraints...)
		diags = append(diags, moreDiags...)
	}

	// Before we go further, we'll check to make sure this version can read
	// all files, so we can produce a version-related error message rather than
	// potentially-confusing downstream errors.
	versionDiags := cfg.CheckCoreVersionRequirements(p.CorePackerVersion.Core())
	diags = append(diags, versionDiags...)
	if versionDiags.HasErrors() {
		return cfg, diags
	}

	// Looks for invalid arguments or unsupported block types
	{
		for _, file := range files {
			_, moreDiags := file.Body.Content(configSchema)
			diags = append(diags, moreDiags...)
		}
	}

	// Decode required_plugins blocks.
	//
	// Note: using `latest` ( or actually an empty string ) in a config file
	// does not work and packer will ask you to pick a version
	{
		for _, file := range files {
			diags = append(diags, cfg.decodeRequiredPluginsBlock(file)...)
		}
	}

	// Decode variable blocks so that they are available later on. Here locals
	// can use input variables so we decode input variables first.
	{
		for _, file := range files {
			diags = append(diags, cfg.decodeInputVariables(file)...)
		}

		for _, file := range files {
			morediags := p.decodeDatasources(file, cfg)
			diags = append(diags, morediags...)
		}

		// Locals are only collected here; their expressions are evaluated
		// later, during Initialize.
		for _, file := range files {
			moreLocals, morediags := parseLocalVariableBlocks(file)
			diags = append(diags, morediags...)
			cfg.LocalBlocks = append(cfg.LocalBlocks, moreLocals...)
		}

		diags = diags.Extend(cfg.checkForDuplicateLocalDefinition())
	}

	// parse var files
	{
		hclVarFiles, jsonVarFiles, moreDiags := GetHCL2Files(filename, hcl2AutoVarFileExt, hcl2AutoVarJsonFileExt)
		diags = append(diags, moreDiags...)

		// Combine all variable files into a single list, preserving the intended precedence and order.
		// The order is: auto-loaded HCL files, auto-loaded JSON files, followed by user-specified varFiles.
		// This ensures that user-specified files can override values from auto-loaded files,
		// and that their relative order is preserved exactly as specified by the user.
		variableFileNames := append(append(hclVarFiles, jsonVarFiles...), varFiles...)

		var variableFiles []*hcl.File

		for _, file := range variableFileNames {
			var (
				f         *hcl.File
				moreDiags hcl.Diagnostics
			)
			// Auto-loaded files were already filtered by extension; explicit
			// varFiles can be anything, so guess the format from the suffix.
			switch filepath.Ext(file) {
			case ".hcl":
				f, moreDiags = p.ParseHCLFile(file)
			case ".json":
				f, moreDiags = p.ParseJSONFile(file)
			default:
				moreDiags = hcl.Diagnostics{
					&hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Could not guess format of " + file,
						Detail:   "A var file must be suffixed with `.hcl` or `.json`.",
					},
				}
			}

			diags = append(diags, moreDiags...)
			// Skip files that failed to parse so only valid files feed into
			// value collection below.
			if moreDiags.HasErrors() {
				continue
			}
			variableFiles = append(variableFiles, f)

		}

		// Collect the input variable values from the environment, the parsed
		// var files and the -var command-line arguments.
		diags = append(diags, cfg.collectInputVariableValues(os.Environ(), variableFiles, argVars)...)
	}

	return cfg, diags
}
   256  
   257  // sniffCoreVersionRequirements does minimal parsing of the given body for
   258  // "packer" blocks with "required_version" attributes, returning the
   259  // requirements found.
   260  //
   261  // This is intended to maximize the chance that we'll be able to read the
   262  // requirements (syntax errors notwithstanding) even if the config file contains
   263  // constructs that might've been added in future versions
   264  //
   265  // This is a "best effort" sort of method which will return constraints it is
   266  // able to find, but may return no constraints at all if the given body is
   267  // so invalid that it cannot be decoded at all.
   268  func sniffCoreVersionRequirements(body hcl.Body) ([]VersionConstraint, hcl.Diagnostics) {
   269  
   270  	var sniffRootSchema = &hcl.BodySchema{
   271  		Blocks: []hcl.BlockHeaderSchema{
   272  			{
   273  				Type: packerLabel,
   274  			},
   275  		},
   276  	}
   277  
   278  	rootContent, _, diags := body.PartialContent(sniffRootSchema)
   279  
   280  	var constraints []VersionConstraint
   281  
   282  	for _, block := range rootContent.Blocks {
   283  		content, blockDiags := block.Body.Content(packerBlockSchema)
   284  		diags = append(diags, blockDiags...)
   285  
   286  		attr, exists := content.Attributes["required_version"]
   287  		if !exists {
   288  			continue
   289  		}
   290  
   291  		constraint, constraintDiags := decodeVersionConstraint(attr)
   292  		diags = append(diags, constraintDiags...)
   293  		if !constraintDiags.HasErrors() {
   294  			constraints = append(constraints, constraint)
   295  		}
   296  	}
   297  
   298  	return constraints, diags
   299  }
   300  
   301  func filterVarsFromLogs(inputOrLocal Variables) {
   302  	for _, variable := range inputOrLocal {
   303  		if !variable.Sensitive {
   304  			continue
   305  		}
   306  		value := variable.Value()
   307  		_ = cty.Walk(value, func(_ cty.Path, nested cty.Value) (bool, error) {
   308  			if nested.IsWhollyKnown() && !nested.IsNull() && nested.Type().Equals(cty.String) {
   309  				packersdk.LogSecretFilter.Set(nested.AsString())
   310  			}
   311  			return true, nil
   312  		})
   313  	}
   314  }
   315  
// detectBuildPrereqDependencies scans every datasource and local variable for
// references to other datasources (`data.*`) and locals (`local.*`) and
// registers those references as dependencies on the block. The recorded
// dependencies are what buildPrereqsDAG later uses to build the evaluation
// graph.
func (cfg *PackerConfig) detectBuildPrereqDependencies() hcl.Diagnostics {
	var diags hcl.Diagnostics

	// Datasources: inspect the block body for data/local traversals.
	for _, ds := range cfg.Datasources {
		dependencies := GetVarsByType(ds.block, "data")
		dependencies = append(dependencies, GetVarsByType(ds.block, "local")...)

		for _, dep := range dependencies {
			// If something is locally aliased as `local` or `data`, we'll falsely
			// report it as a local variable, which is not necessarily what we
			// want to process here, so we continue.
			//
			// Note: this is kinda brittle, we should understand scopes to accurately
			// mark something from an expression as a reference to a local variable.
			// No real good solution for this now, besides maybe forbidding something
			// to be locally aliased as `local`.
			if len(dep) < 2 {
				continue
			}
			rs, err := NewRefStringFromDep(dep)
			if err != nil {
				diags = diags.Append(&hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "failed to process datasource dependency",
					Detail: fmt.Sprintf("An error occurred while processing a dependency for data source %s: %s",
						ds.Name(), err),
				})
				continue
			}

			err = ds.RegisterDependency(rs)
			if err != nil {
				diags = diags.Append(&hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "failed to register datasource dependency",
					Detail: fmt.Sprintf("An error occurred while registering %q as a dependency for data source %s: %s",
						rs, ds.Name(), err),
				})
			}
		}

		// `ds` is a copy of the map value, so the block mutated by
		// RegisterDependency must be written back into the map.
		cfg.Datasources[ds.Ref()] = ds
	}

	// Locals: same detection, but on the variables referenced by the local's
	// value expression. LocalBlocks holds pointers, so no write-back needed.
	for _, loc := range cfg.LocalBlocks {
		dependencies := FilterTraversalsByType(loc.Expr.Variables(), "data")
		dependencies = append(dependencies, FilterTraversalsByType(loc.Expr.Variables(), "local")...)

		for _, dep := range dependencies {
			// If something is locally aliased as `local` or `data`, we'll falsely
			// report it as a local variable, which is not necessarily what we
			// want to process here, so we continue.
			//
			// Note: this is kinda brittle, we should understand scopes to accurately
			// mark something from an expression as a reference to a local variable.
			// No real good solution for this now, besides maybe forbidding something
			// to be locally aliased as `local`.
			if len(dep) < 2 {
				continue
			}
			rs, err := NewRefStringFromDep(dep)
			if err != nil {
				diags = diags.Append(&hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "failed to process local dependency",
					Detail: fmt.Sprintf("An error occurred while processing a dependency for local variable %s: %s",
						loc.LocalName, err),
				})
				continue
			}

			err = loc.RegisterDependency(rs)
			if err != nil {
				diags = diags.Append(&hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "failed to register local dependency",
					Detail: fmt.Sprintf("An error occurred while registering %q as a dependency for local variable %s: %s",
						rs, loc.LocalName, err),
				})
			}
		}
	}

	return diags
}
   401  
   402  func (cfg *PackerConfig) buildPrereqsDAG() (*dag.AcyclicGraph, error) {
   403  	retGraph := dag.AcyclicGraph{}
   404  
   405  	verticesMap := map[string]dag.Vertex{}
   406  
   407  	var err error
   408  
   409  	// Do a first pass to create all the vertices
   410  	for ref := range cfg.Datasources {
   411  		// We keep a reference to the datasource separately from where it
   412  		// is used to avoid getting bit by the loop semantics.
   413  		//
   414  		// This `ds` local variable is the same object for every loop
   415  		// so if we directly use the address of this object, we'll end
   416  		// up referencing the last node of the loop for each vertex,
   417  		// leading to implicit cycles.
   418  		//
   419  		// However by capturing it locally in this loop, we have a
   420  		// reference to the actual datasource block, so it ends-up being
   421  		// the right instance for each vertex.
   422  		ds := cfg.Datasources[ref]
   423  		v := retGraph.Add(&ds)
   424  		verticesMap[fmt.Sprintf("data.%s", ds.Name())] = v
   425  	}
   426  	// Note: locals being references to the objects already, we can safely
   427  	// use the reference returned by the local loop.
   428  	for _, local := range cfg.LocalBlocks {
   429  		v := retGraph.Add(local)
   430  		verticesMap[fmt.Sprintf("local.%s", local.LocalName)] = v
   431  	}
   432  
   433  	// Connect the vertices together
   434  	//
   435  	// Vertices that don't have dependencies will be connected to the
   436  	// root vertex of the graph
   437  	for _, ds := range cfg.Datasources {
   438  		dsName := fmt.Sprintf("data.%s", ds.Name())
   439  
   440  		source := verticesMap[dsName]
   441  		if source == nil {
   442  			err = multierror.Append(err, fmt.Errorf("unable to find source vertex %q for dependency analysis, this is likely a Packer bug", dsName))
   443  			continue
   444  		}
   445  
   446  		for _, dep := range ds.Dependencies {
   447  			target := verticesMap[dep.String()]
   448  			if target == nil {
   449  				err = multierror.Append(err, fmt.Errorf("could not get dependency %q for %q, %q missing in template", dep.String(), dsName, dep.String()))
   450  				continue
   451  			}
   452  
   453  			retGraph.Connect(dag.BasicEdge(source, target))
   454  		}
   455  	}
   456  	for _, loc := range cfg.LocalBlocks {
   457  		locName := fmt.Sprintf("local.%s", loc.LocalName)
   458  
   459  		source := verticesMap[locName]
   460  		if source == nil {
   461  			err = multierror.Append(err, fmt.Errorf("unable to find source vertex %q for dependency analysis, this is likely a Packer bug", locName))
   462  			continue
   463  		}
   464  
   465  		for _, dep := range loc.dependencies {
   466  			target := verticesMap[dep.String()]
   467  
   468  			if target == nil {
   469  				err = multierror.Append(err, fmt.Errorf("could not get dependency %q for %q, %q missing in template", dep.String(), locName, dep.String()))
   470  				continue
   471  			}
   472  
   473  			retGraph.Connect(dag.BasicEdge(source, target))
   474  		}
   475  	}
   476  
   477  	if validateErr := retGraph.Validate(); validateErr != nil {
   478  		err = multierror.Append(err, validateErr)
   479  	}
   480  
   481  	return &retGraph, err
   482  }
   483  
   484  func (cfg *PackerConfig) evaluateBuildPrereqs(skipDatasources bool) hcl.Diagnostics {
   485  	diags := cfg.detectBuildPrereqDependencies()
   486  	if diags.HasErrors() {
   487  		return diags
   488  	}
   489  
   490  	graph, err := cfg.buildPrereqsDAG()
   491  	if err != nil {
   492  		return diags.Append(&hcl.Diagnostic{
   493  			Severity: hcl.DiagError,
   494  			Summary:  "failed to prepare execution graph",
   495  			Detail:   fmt.Sprintf("An error occurred while building the graph for datasources/locals: %s", err),
   496  		})
   497  	}
   498  
   499  	walkFunc := func(v dag.Vertex) hcl.Diagnostics {
   500  		var diags hcl.Diagnostics
   501  
   502  		switch bl := v.(type) {
   503  		case *DatasourceBlock:
   504  			diags = cfg.evaluateDatasource(*bl, skipDatasources)
   505  		case *LocalBlock:
   506  			var val *Variable
   507  			if cfg.LocalVariables == nil {
   508  				cfg.LocalVariables = make(Variables)
   509  			}
   510  			val, diags = cfg.evaluateLocalVariable(bl)
   511  			// Note: clumsy a bit, but we won't add the variable as `nil` here
   512  			// unless no errors have been reported during evaluation.
   513  			//
   514  			// This prevents Packer from panicking down the line, as initialisation
   515  			// doesn't stop if there are diags, so if `val` is nil, it crashes.
   516  			if !diags.HasErrors() {
   517  				cfg.LocalVariables[bl.LocalName] = val
   518  			}
   519  		default:
   520  			diags = diags.Append(&hcl.Diagnostic{
   521  				Severity: hcl.DiagError,
   522  				Summary:  "unsupported DAG node type",
   523  				Detail: fmt.Sprintf("A node of type %q was added to the DAG, but cannot be "+
   524  					"evaluated as it is unsupported. "+
   525  					"This is a Packer bug, please report it so we can investigate.",
   526  					reflect.TypeOf(v).String()),
   527  			})
   528  		}
   529  
   530  		if diags.HasErrors() {
   531  			return diags
   532  		}
   533  
   534  		return nil
   535  	}
   536  
   537  	for _, vtx := range graph.ReverseTopologicalOrder() {
   538  		vtxDiags := walkFunc(vtx)
   539  		if vtxDiags.HasErrors() {
   540  			diags = diags.Extend(vtxDiags)
   541  			return diags
   542  		}
   543  	}
   544  
   545  	return nil
   546  }
   547  
   548  func (cfg *PackerConfig) Initialize(opts packer.InitializeOptions) hcl.Diagnostics {
   549  	diags := cfg.InputVariables.ValidateValues()
   550  
   551  	if opts.UseSequential {
   552  		diags = diags.Extend(cfg.evaluateDatasources(opts.SkipDatasourcesExecution))
   553  		diags = diags.Extend(cfg.evaluateLocalVariables(cfg.LocalBlocks))
   554  	} else {
   555  		diags = diags.Extend(cfg.evaluateBuildPrereqs(opts.SkipDatasourcesExecution))
   556  	}
   557  
   558  	filterVarsFromLogs(cfg.InputVariables)
   559  	filterVarsFromLogs(cfg.LocalVariables)
   560  
   561  	// parse the actual content // rest
   562  	for _, file := range cfg.files {
   563  		diags = append(diags, cfg.parser.parseConfig(file, cfg)...)
   564  	}
   565  
   566  	diags = append(diags, cfg.initializeBlocks()...)
   567  
   568  	return diags
   569  }
   570  
// parseConfig looks in the found blocks for everything that is not a variable
// block.
//
// It is called from Initialize, after variables, locals and datasources have
// been evaluated, and decodes hcp_packer_registry, source and build blocks
// into cfg.
func (p *Parser) parseConfig(f *hcl.File, cfg *PackerConfig) hcl.Diagnostics {
	var diags hcl.Diagnostics

	body := f.Body
	// Expand `dynamic` blocks now that variables/locals/datasources are
	// available in the evaluation context.
	body = dynblock.Expand(body, cfg.EvalContext(DatasourceContext, nil))
	content, moreDiags := body.Content(configSchema)
	diags = append(diags, moreDiags...)

	for _, block := range content.Blocks {
		switch block.Type {
		case buildHCPPackerRegistryLabel:
			// At most one hcp_packer_registry block per configuration.
			if cfg.HCPPackerRegistry != nil {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Only one " + buildHCPPackerRegistryLabel + " is allowed",
					Subject:  block.DefRange.Ptr(),
				})
				continue
			}
			hcpPackerRegistry, moreDiags := p.decodeHCPRegistry(block, cfg)
			diags = append(diags, moreDiags...)
			if moreDiags.HasErrors() {
				continue
			}
			cfg.HCPPackerRegistry = hcpPackerRegistry

		case sourceLabel:
			source, moreDiags := p.decodeSource(block)
			diags = append(diags, moreDiags...)
			if moreDiags.HasErrors() {
				continue
			}

			// Sources are keyed by (builder type, name); duplicates are an
			// error, pointing at both declarations.
			ref := source.Ref()
			if existing, found := cfg.Sources[ref]; found {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Duplicate " + sourceLabel + " block",
					Detail: fmt.Sprintf("This "+sourceLabel+" block has the "+
						"same builder type and name as a previous block declared "+
						"at %s. Each "+sourceLabel+" must have a unique name per builder type.",
						existing.block.DefRange.Ptr()),
					Subject: source.block.DefRange.Ptr(),
				})
				continue
			}

			if cfg.Sources == nil {
				cfg.Sources = map[SourceRef]SourceBlock{}
			}
			cfg.Sources[ref] = source

		case buildLabel:
			build, moreDiags := p.decodeBuildConfig(block, cfg)
			diags = append(diags, moreDiags...)
			if moreDiags.HasErrors() {
				continue
			}

			cfg.Builds = append(cfg.Builds, build)
		}
	}

	return diags
}
   638  
   639  func (p *Parser) decodeDatasources(file *hcl.File, cfg *PackerConfig) hcl.Diagnostics {
   640  	var diags hcl.Diagnostics
   641  
   642  	body := file.Body
   643  	content, _ := body.Content(configSchema)
   644  
   645  	for _, block := range content.Blocks {
   646  		switch block.Type {
   647  		case dataSourceLabel:
   648  			datasource, moreDiags := p.decodeDataBlock(block)
   649  			diags = append(diags, moreDiags...)
   650  			if moreDiags.HasErrors() {
   651  				continue
   652  			}
   653  			ref := datasource.Ref()
   654  			if existing, found := cfg.Datasources[ref]; found {
   655  				diags = append(diags, &hcl.Diagnostic{
   656  					Severity: hcl.DiagError,
   657  					Summary:  "Duplicate " + dataSourceLabel + " block",
   658  					Detail: fmt.Sprintf("This "+dataSourceLabel+" block has the "+
   659  						"same data type and name as a previous block declared "+
   660  						"at %s. Each "+dataSourceLabel+" must have a unique name per builder type.",
   661  						existing.block.DefRange.Ptr()),
   662  					Subject: datasource.block.DefRange.Ptr(),
   663  				})
   664  				continue
   665  			}
   666  			if cfg.Datasources == nil {
   667  				cfg.Datasources = Datasources{}
   668  			}
   669  			cfg.Datasources[ref] = *datasource
   670  		}
   671  	}
   672  
   673  	return diags
   674  }