github.com/cdmixer/woolloomooloo@v0.1.0/pkg/codegen/hcl2/syntax/comments.go

     1  // Copyright 2016-2020, Pulumi Corporation.
     2  //
     3  // Licensed under the Apache License, Version 2.0 (the "License");
     4  // you may not use this file except in compliance with the License.
     5  // You may obtain a copy of the License at
     6  //
     7  //     http://www.apache.org/licenses/LICENSE-2.0
     8  //
     9  // Unless required by applicable law or agreed to in writing, software
    10  // distributed under the License is distributed on an "AS IS" BASIS,
    11  // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    12  // See the License for the specific language governing permissions and
    13  // limitations under the License.
    14  
    15  package syntax
    16  
    17  import (
    18  	"bytes"
    19  	"regexp"
    20  	"strings"
    21  
    22  	"github.com/hashicorp/hcl/v2"
    23  	"github.com/hashicorp/hcl/v2/hclsyntax"
    24  	"github.com/pulumi/pulumi/pkg/v2/codegen"
    25  	"github.com/pulumi/pulumi/sdk/v2/go/common/util/contract"
    26  )
    27  
    28  // tokenList is a list of Tokens with methods to aid in mapping source positions to tokens.
    29  type tokenList []Token
    30  
    31  // offsetIndex returns the index of the token that contains the given byte offset or -1 if no such token exists.
    32  func (l tokenList) offsetIndex(offset int) int {
    33  	base := 0
    34  	for len(l) > 0 {
    35  		i := len(l) / 2
    36  		r := l[i].Range()
    37  		switch {
    38  		case offset < r.Start.Byte:
    39  			l = l[:i]
    40  		case r.Start.Byte <= offset && offset < r.End.Byte:
    41  			return base + i
    42  		case r.End.Byte <= offset:
    43  			l, base = l[i+1:], base+i+1
    44  		default:
    45  			contract.Failf("unexpected index condition: %v, %v, %v", r.Start.Byte, r.End.Byte, offset)
    46  		}
    47  	}
    48  	return -1
    49  }
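// Illustrative sketch: assuming a hypothetical token list l whose three tokens cover byte ranges [0,3),
// [4,5), and [6,7) of the text "foo = 1", the binary search above resolves offsets as follows:
//
//	l.offsetIndex(1) // 0: inside "foo"
//	l.offsetIndex(4) // 1: the "=" token
//	l.offsetIndex(3) // -1: the space at byte 3 is not covered by any token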
    50  
    51  // atOffset returns the token that contains the given byte offset or the zero value if no such token exists.
    52  func (l tokenList) atOffset(offset int) Token {
    53  	if i := l.offsetIndex(offset); i >= 0 {
    54  		return l[i]
    55  	}
    56  	return Token{}
    57  }
    58  
    59  // atPos returns the token that contains the given hcl.Pos or the zero value if no such token exists.
    60  func (l tokenList) atPos(p hcl.Pos) Token {
    61  	return l.atOffset(p.Byte)
    62  }
    63  
    64  // inRange returns a slice of the tokens that cover the given range or nil if either the start or end position is
    65  // uncovered by a token.
    66  func (l tokenList) inRange(r hcl.Range) []Token {
    67  	// If the range is empty, ignore it.
    68  	if r.Empty() {
    69  		return nil
    70  	}
    71  
    72  	// Find the index of the start and end tokens for this range.
    73  	start, end := l.offsetIndex(r.Start.Byte), l.offsetIndex(r.End.Byte-1)
    74  	if start == -1 || end == -1 {
    75  		return nil
    76  	}
    77  	return l[start : end+1]
    78  }
    79  
    80  // A TokenMap is used to map from syntax nodes to information about their tokens and leading whitespace/comments.
    81  type TokenMap interface {
    82  	ForNode(n hclsyntax.Node) NodeTokens
    83  
    84  	isTokenMap()
    85  }
    86  
    87  type tokenMap map[hclsyntax.Node]NodeTokens
    88  
    89  // ForNode returns the token information for the given node, if any.
    90  func (m tokenMap) ForNode(n hclsyntax.Node) NodeTokens {
    91  	return m[n]
    92  }
    93  
    94  func (tokenMap) isTokenMap() {}
    95  
    96  // NewTokenMapForFiles creates a new token map that can be used to look up tokens for nodes in any of the given files.
    97  func NewTokenMapForFiles(files []*File) TokenMap {
    98  	tokens := tokenMap{}
    99  	for _, f := range files {
   100  		for node, ts := range f.Tokens.(tokenMap) {
   101  			tokens[node] = ts
   102  		}
   103  	}
   104  	return tokens
   105  }
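// Illustrative sketch (fileA, fileB, and attr are hypothetical): token information recorded by the mapper is
// looked up through the TokenMap, e.g.
//
//	tokens := NewTokenMapForFiles([]*File{fileA, fileB})
//	if at, ok := tokens.ForNode(attr).(*AttributeTokens); ok {
//		_ = at.Equals.LeadingTrivia // whitespace and comments preceding the attribute's "="
//	}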
   106  
   107  type tokenMapper struct {
   108  	tokenMap             tokenMap
   109  	tokens               tokenList
   110  	stack                []hclsyntax.Node
   111  	templateControlExprs codegen.Set
   112  }
   113  
   114  func (m *tokenMapper) getParent() (hclsyntax.Node, bool) {
   115  	if len(m.stack) < 2 {
   116  		return nil, false
   117  	}
   118  	return m.stack[len(m.stack)-2], true
   119  }
   120  
   121  func (m *tokenMapper) isSingleFunctionCallArg() bool {
   122  	parent, ok := m.getParent()
   123  	if !ok {
   124  		return false
   125  	}
   126  	call, ok := parent.(*hclsyntax.FunctionCallExpr)
   127  	return ok && len(call.Args) == 1
   128  }
   129  
   130  func (m *tokenMapper) inTemplateControl() bool {
   131  	if len(m.stack) < 2 {
   132  		return false
   133  	}
   134  	parent := m.stack[len(m.stack)-2]
   135  	return m.templateControlExprs.Has(parent)
   136  }
   137  
   138  func (m *tokenMapper) collectLabelTokens(r hcl.Range) Token {
   139  	tokens := m.tokens.inRange(r)
   140  	if len(tokens) != 3 {
   141  		return m.tokens.atPos(r.Start)
   142  	}
   143  	open := tokens[0]
   144  	if open.Raw.Type != hclsyntax.TokenOQuote || len(open.TrailingTrivia) != 0 {
   145  		return m.tokens.atPos(r.Start)
   146  	}
   147  	str := tokens[1]
   148  	if str.Raw.Type != hclsyntax.TokenQuotedLit || len(str.LeadingTrivia) != 0 || len(str.TrailingTrivia) != 0 {
   149  		return m.tokens.atPos(r.Start)
   150  	}
   151  	close := tokens[2]
   152  	if close.Raw.Type != hclsyntax.TokenCQuote || len(close.LeadingTrivia) != 0 {
   153  		return m.tokens.atPos(r.Start)
   154  	}
   155  	return Token{
   156  		Raw: hclsyntax.Token{
   157  			Type:  hclsyntax.TokenQuotedLit,
   158  			Bytes: append(append(open.Raw.Bytes, str.Raw.Bytes...), close.Raw.Bytes...),
   159  			Range: hcl.Range{
   160  				Filename: open.Raw.Range.Filename,
   161  				Start:    open.Raw.Range.Start,
   162  				End:      close.Raw.Range.End,
   163  			},
   164  		},
   165  		LeadingTrivia:  open.LeadingTrivia,
   166  		TrailingTrivia: close.TrailingTrivia,
   167  	}
   168  }
   169  
   170  func (m *tokenMapper) mapRelativeTraversalTokens(traversal hcl.Traversal) []TraverserTokens {
   171  	if len(traversal) == 0 {
   172  		return nil
   173  	}
   174  
   175  	contract.Assert(traversal.IsRelative())
   176  	items := make([]TraverserTokens, len(traversal))
   177  	for i, t := range traversal {
   178  		rng := t.SourceRange()
   179  		leadingToken := m.tokens.atPos(rng.Start)
   180  		indexToken := m.tokens.atOffset(rng.Start.Byte + 1)
   181  		if leadingToken.Raw.Type == hclsyntax.TokenOBrack {
   182  			if indexToken.Raw.Type == hclsyntax.TokenOQuote {
   183  				indexToken = m.collectLabelTokens(hcl.Range{
   184  					Filename: rng.Filename,
   185  					Start:    hcl.Pos{Byte: rng.Start.Byte + 1},
   186  					End:      hcl.Pos{Byte: rng.End.Byte - 1},
   187  				})
   188  			}
   189  			items[i] = &BracketTraverserTokens{
   190  				OpenBracket:  leadingToken,
   191  				Index:        indexToken,
   192  				CloseBracket: m.tokens.atOffset(rng.End.Byte - 1),
   193  			}
   194  		} else {
   195  			items[i] = &DotTraverserTokens{
   196  				Dot:   leadingToken,
   197  				Index: indexToken,
   198  			}
   199  		}
   200  	}
   201  
   202  	return items
   203  }
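// Illustrative sketch (hypothetical source text): for the relative traversal `.bar["baz"]`, the loop above
// produces a *DotTraverserTokens for `.bar` followed by a *BracketTraverserTokens for `["baz"]`, with the
// quoted index collapsed into a single token by collectLabelTokens:
//
//	[]TraverserTokens{
//		&DotTraverserTokens{Dot: dotTok, Index: barTok},
//		&BracketTraverserTokens{OpenBracket: openTok, Index: bazTok, CloseBracket: closeTok},
//	}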
   204  
   205  func (m *tokenMapper) nodeRange(n hclsyntax.Node) hcl.Range {
   206  	tokens, ok := m.tokenMap[n]
   207  	if !ok {
   208  		return n.Range()
   209  	}
   210  
   211  	filename := n.Range().Filename
   212  
   213  	var parens Parentheses
   214  	var start, end hcl.Pos
   215  	switch n := n.(type) {
   216  	case *hclsyntax.Attribute:
   217  		tokens := tokens.(*AttributeTokens)
   218  		start, end = tokens.Name.Range().Start, m.nodeRange(n.Expr).End
   219  	case *hclsyntax.BinaryOpExpr:
   220  		tokens := tokens.(*BinaryOpTokens)
   221  		parens = tokens.Parentheses
   222  		start, end = m.nodeRange(n.LHS).Start, m.nodeRange(n.RHS).End
   223  	case *hclsyntax.Block:
   224  		tokens := tokens.(*BlockTokens)
   225  		start, end = tokens.Type.Range().Start, tokens.CloseBrace.Range().End
   226  	case *hclsyntax.ConditionalExpr:
   227  		switch tokens := tokens.(type) {
   228  		case *ConditionalTokens:
   229  			parens = tokens.Parentheses
   230  			start, end = m.nodeRange(n.Condition).Start, m.nodeRange(n.FalseResult).End
   231  		case *TemplateConditionalTokens:
   232  			start, end = tokens.OpenIf.Range().Start, tokens.CloseEndif.Range().End
   233  		}
   234  	case *hclsyntax.ForExpr:
   235  		switch tokens := tokens.(type) {
   236  		case *ForTokens:
   237  			parens = tokens.Parentheses
   238  			start, end = tokens.Open.Range().Start, tokens.Close.Range().End
   239  		case *TemplateForTokens:
   240  			start, end = tokens.OpenFor.Range().Start, tokens.CloseEndfor.Range().End
   241  		}
   242  	case *hclsyntax.FunctionCallExpr:
   243  		tokens := tokens.(*FunctionCallTokens)
   244  		parens = tokens.Parentheses
   245  		start, end = tokens.Name.Range().Start, tokens.CloseParen.Range().End
   246  	case *hclsyntax.IndexExpr:
   247  		tokens := tokens.(*IndexTokens)
   248  		parens = tokens.Parentheses
   249  		start, end = m.nodeRange(n.Collection).Start, tokens.CloseBracket.Range().End
   250  	case *hclsyntax.LiteralValueExpr, *hclsyntax.ObjectConsKeyExpr:
   251  		tokens := tokens.(*LiteralValueTokens)
   252  		parens = tokens.Parentheses
   253  		start, end = tokens.Value[0].Range().Start, tokens.Value[len(tokens.Value)-1].Range().End
   254  	case *hclsyntax.ObjectConsExpr:
   255  		tokens := tokens.(*ObjectConsTokens)
   256  		parens = tokens.Parentheses
   257  		start, end = tokens.OpenBrace.Range().Start, tokens.CloseBrace.Range().End
   258  	case *hclsyntax.RelativeTraversalExpr:
   259  		tokens := tokens.(*RelativeTraversalTokens)
   260  		parens = tokens.Parentheses
   261  		start, end = m.nodeRange(n.Source).Start, tokens.Traversal[len(tokens.Traversal)-1].Range().End
   262  	case *hclsyntax.ScopeTraversalExpr:
   263  		tokens := tokens.(*ScopeTraversalTokens)
   264  		parens = tokens.Parentheses
   265  		start, end = tokens.Root.Range().Start, tokens.Root.Range().End
   266  		if len(tokens.Traversal) > 0 {
   267  			end = tokens.Traversal[len(tokens.Traversal)-1].Range().End
   268  		}
   269  	case *hclsyntax.SplatExpr:
   270  		tokens := tokens.(*SplatTokens)
   271  		parens = tokens.Parentheses
   272  		start, end = m.nodeRange(n.Source).Start, m.nodeRange(n.Each).End
   273  	case *hclsyntax.TemplateExpr:
   274  		tokens := tokens.(*TemplateTokens)
   275  		parens = tokens.Parentheses
   276  		start, end = tokens.Open.Range().Start, tokens.Close.Range().End
   277  	case *hclsyntax.TemplateWrapExpr:
   278  		tokens := tokens.(*TemplateTokens)
   279  		parens = tokens.Parentheses
   280  		start, end = tokens.Open.Range().Start, tokens.Close.Range().End
   281  	case *hclsyntax.TupleConsExpr:
   282  		tokens := tokens.(*TupleConsTokens)
   283  		parens = tokens.Parentheses
   284  		start, end = tokens.OpenBracket.Range().Start, tokens.CloseBracket.Range().End
   285  	case *hclsyntax.UnaryOpExpr:
   286  		tokens := tokens.(*UnaryOpTokens)
   287  		parens = tokens.Parentheses
   288  		start, end = tokens.Operator.Range().Start, m.nodeRange(n.Val).End
   289  	}
   290  
   291  	return exprRange(filename, parens, start, end)
   292  }
   293  
   294  func (m *tokenMapper) Enter(n hclsyntax.Node) hcl.Diagnostics {
   295  	switch n := n.(type) {
   296  	case hclsyntax.Attributes, hclsyntax.Blocks, hclsyntax.ChildScope:
   297  		// Do nothing
   298  	default:
   299  		m.stack = append(m.stack, n)
   300  	}
   301  
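	// Record conditional and for expressions whose opening token is a template control sequence
	// (`%{if ...}` or `%{for ...}`) so that Exit maps their tokens using the template forms below.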
   302  	switch n := n.(type) {
   303  	case *hclsyntax.ConditionalExpr:
   304  		open := m.tokens.atPos(n.SrcRange.Start)
   305  		if open.Raw.Type == hclsyntax.TokenTemplateControl {
   306  			m.templateControlExprs.Add(n)
   307  		}
   308  	case *hclsyntax.ForExpr:
   309  		open := m.tokens.atPos(n.OpenRange.Start)
   310  		if open.Raw.Type == hclsyntax.TokenTemplateControl {
   311  			m.templateControlExprs.Add(n)
   312  		}
   313  	}
   314  
   315  	// Work around a bug in the HCL syntax library: the walkChildNodes implementation for ObjectConsKeyExpr does not
   316  	// descend into its wrapped expression if the wrapped expression can be interpreted as a keyword, even when the
   317  	// syntactic form that _forces_ the wrapped expression to be a non-literal is used.
   318  	if x, ok := n.(*hclsyntax.ObjectConsKeyExpr); ok && x.ForceNonLiteral && hcl.ExprAsKeyword(x.Wrapped) != "" {
   319  		return hclsyntax.Walk(x.Wrapped, m)
   320  	}
   321  	return nil
   322  }
   323  
   324  func (m *tokenMapper) Exit(n hclsyntax.Node) hcl.Diagnostics {
   325  	// Gather parentheses.
   326  	var parens Parentheses
   327  	startParens, endParens := n.Range().Start.Byte-1, n.Range().End.Byte
   328  	for {
   329  		open, close := m.tokens.atOffset(startParens), m.tokens.atOffset(endParens)
   330  		if open.Raw.Type != hclsyntax.TokenOParen || close.Raw.Type != hclsyntax.TokenCParen {
   331  			break
   332  		}
   333  		parens.Open, parens.Close = append(parens.Open, open), append(parens.Close, close)
   334  		startParens, endParens = open.Range().Start.Byte-1, close.Range().End.Byte
   335  	}
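	// When this expression is the sole argument to a function call, the outermost pair gathered above is taken
	// to be the call's own parentheses rather than a grouping of the argument, so drop it.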
   336  	if m.isSingleFunctionCallArg() && len(parens.Open) > 0 {
   337  		parens.Open, parens.Close = parens.Open[:len(parens.Open)-1], parens.Close[:len(parens.Close)-1]
   338  	}
   339  
   340  	var nodeTokens NodeTokens
   341  	switch n := n.(type) {
   342  	case *hclsyntax.Attribute:
   343  		nodeTokens = &AttributeTokens{
   344  			Name:   m.tokens.atPos(n.NameRange.Start),
   345  			Equals: m.tokens.atPos(n.EqualsRange.Start),
   346  		}
   347  	case *hclsyntax.BinaryOpExpr:
   348  		nodeTokens = &BinaryOpTokens{
   349  			Parentheses: parens,
   350  			Operator:    m.tokens.atPos(m.nodeRange(n.LHS).End),
   351  		}
   352  	case *hclsyntax.Block:
   353  		labels := make([]Token, len(n.Labels))
   354  		for i, r := range n.LabelRanges {
   355  			labels[i] = m.collectLabelTokens(r)
   356  		}
   357  		nodeTokens = &BlockTokens{
   358  			Type:       m.tokens.atPos(n.TypeRange.Start),
   359  			Labels:     labels,
   360  			OpenBrace:  m.tokens.atPos(n.OpenBraceRange.Start),
   361  			CloseBrace: m.tokens.atPos(n.CloseBraceRange.Start),
   362  		}
   363  	case *hclsyntax.ConditionalExpr:
   364  		condRange := m.nodeRange(n.Condition)
   365  		trueRange := m.nodeRange(n.TrueResult)
   366  		falseRange := m.nodeRange(n.FalseResult)
   367  
   368  		if m.templateControlExprs.Has(n) {
   369  			condition := m.tokens.atPos(condRange.Start)
   370  
   371  			ift := m.tokens.atOffset(condition.Range().Start.Byte - 1)
   372  			openIf := m.tokens.atOffset(ift.Range().Start.Byte - 1)
   373  			closeIf := m.tokens.atPos(condRange.End)
   374  
   375  			openEndifOrElse := m.tokens.atPos(trueRange.End)
   376  			endifOrElse := m.tokens.atPos(openEndifOrElse.Range().End)
   377  			closeEndifOrElse := m.tokens.atPos(endifOrElse.Range().End)
   378  
   379  			var openElse, elset, closeElse *Token
   380  			if endifOrElse.Raw.Type == hclsyntax.TokenIdent && string(endifOrElse.Raw.Bytes) == "else" {
   381  				open, t, close := openEndifOrElse, endifOrElse, closeEndifOrElse
   382  				openElse, elset, closeElse = &open, &t, &close
   383  
   384  				openEndifOrElse = m.tokens.atPos(falseRange.End)
   385  				endifOrElse = m.tokens.atPos(openEndifOrElse.Range().End)
   386  				closeEndifOrElse = m.tokens.atPos(endifOrElse.Range().End)
   387  			}
   388  
   389  			nodeTokens = &TemplateConditionalTokens{
   390  				OpenIf:     openIf,
   391  				If:         ift,
   392  				CloseIf:    closeIf,
   393  				OpenElse:   openElse,
   394  				Else:       elset,
   395  				CloseElse:  closeElse,
   396  				OpenEndif:  openEndifOrElse,
   397  				Endif:      endifOrElse,
   398  				CloseEndif: closeEndifOrElse,
   399  			}
   400  		} else {
   401  			nodeTokens = &ConditionalTokens{
   402  				Parentheses:  parens,
   403  				QuestionMark: m.tokens.atPos(condRange.End),
   404  				Colon:        m.tokens.atPos(trueRange.End),
   405  			}
   406  		}
   407  	case *hclsyntax.ForExpr:
   408  		openToken := m.tokens.atPos(n.OpenRange.Start)
   409  		forToken := m.tokens.atPos(openToken.Range().End)
   410  
   411  		var keyToken, commaToken *Token
   412  		var valueToken Token
   413  		if n.KeyVar != "" {
   414  			key := m.tokens.atPos(forToken.Range().End)
   415  			comma := m.tokens.atPos(key.Range().End)
   416  			value := m.tokens.atPos(comma.Range().End)
   417  
   418  			keyToken, commaToken, valueToken = &key, &comma, value
   419  		} else {
   420  			valueToken = m.tokens.atPos(forToken.Range().End)
   421  		}
   422  
   423  		var arrowToken *Token
   424  		if n.KeyExpr != nil {
   425  			arrow := m.tokens.atPos(m.nodeRange(n.KeyExpr).End)
   426  			arrowToken = &arrow
   427  		}
   428  
   429  		valRange := m.nodeRange(n.ValExpr)
   430  
   431  		var groupToken *Token
   432  		if n.Group {
   433  			group := m.tokens.atPos(valRange.End)
   434  			groupToken = &group
   435  		}
   436  
   437  		var ifToken *Token
   438  		if n.CondExpr != nil {
   439  			pos := valRange.End
   440  			if groupToken != nil {
   441  				pos = groupToken.Range().End
   442  			}
   443  			ift := m.tokens.atPos(pos)
   444  			ifToken = &ift
   445  		}
   446  
   447  		if m.templateControlExprs.Has(n) {
   448  			closeFor := m.tokens.atPos(m.nodeRange(n.CollExpr).End)
   449  
   450  			openEndfor := m.tokens.atPos(valRange.End)
   451  			endfor := m.tokens.atPos(openEndfor.Range().End)
   452  			closeEndfor := m.tokens.atPos(endfor.Range().End)
   453  
   454  			nodeTokens = &TemplateForTokens{
   455  				OpenFor:     openToken,
   456  				For:         forToken,
   457  				Key:         keyToken,
   458  				Comma:       commaToken,
   459  				Value:       valueToken,
   460  				In:          m.tokens.atPos(valueToken.Range().End),
   461  				CloseFor:    closeFor,
   462  				OpenEndfor:  openEndfor,
   463  				Endfor:      endfor,
   464  				CloseEndfor: closeEndfor,
   465  			}
   466  		} else {
   467  			nodeTokens = &ForTokens{
   468  				Parentheses: parens,
   469  				Open:        openToken,
   470  				For:         forToken,
   471  				Key:         keyToken,
   472  				Comma:       commaToken,
   473  				Value:       valueToken,
   474  				In:          m.tokens.atPos(valueToken.Range().End),
   475  				Colon:       m.tokens.atPos(m.nodeRange(n.CollExpr).End),
   476  				Arrow:       arrowToken,
   477  				Group:       groupToken,
   478  				If:          ifToken,
   479  				Close:       m.tokens.atPos(n.CloseRange.Start),
   480  			}
   481  		}
   482  	case *hclsyntax.FunctionCallExpr:
   483  		args := n.Args
   484  		commas := make([]Token, 0, len(args))
   485  		if len(args) > 0 {
   486  			for _, ex := range args[:len(args)-1] {
   487  				commas = append(commas, m.tokens.atPos(m.nodeRange(ex).End))
   488  			}
   489  			if trailing := m.tokens.atPos(m.nodeRange(args[len(args)-1]).End); trailing.Raw.Type == hclsyntax.TokenComma {
   490  				commas = append(commas, trailing)
   491  			}
   492  		}
   493  		nodeTokens = &FunctionCallTokens{
   494  			Parentheses: parens,
   495  			Name:        m.tokens.atPos(n.NameRange.Start),
   496  			OpenParen:   m.tokens.atPos(n.OpenParenRange.Start),
   497  			Commas:      commas,
   498  			CloseParen:  m.tokens.atPos(n.CloseParenRange.Start),
   499  		}
   500  	case *hclsyntax.IndexExpr:
   501  		nodeTokens = &IndexTokens{
   502  			Parentheses:  parens,
   503  			OpenBracket:  m.tokens.atPos(n.OpenRange.Start),
   504  			CloseBracket: m.tokens.atOffset(n.BracketRange.End.Byte - 1),
   505  		}
   506  	case *hclsyntax.LiteralValueExpr:
   507  		nodeTokens = &LiteralValueTokens{
   508  			Parentheses: parens,
   509  			Value:       m.tokens.inRange(n.Range()),
   510  		}
   511  	case *hclsyntax.ObjectConsKeyExpr:
   512  		nodeTokens = &LiteralValueTokens{
   513  			Parentheses: parens,
   514  			Value:       m.tokens.inRange(n.Range()),
   515  		}
   516  	case *hclsyntax.ObjectConsExpr:
   517  		items := make([]ObjectConsItemTokens, len(n.Items))
   518  		for i, item := range n.Items {
   519  			var comma *Token
   520  			if t := m.tokens.atPos(m.nodeRange(item.ValueExpr).End); t.Raw.Type == hclsyntax.TokenComma {
   521  				comma = &t
   522  			}
   523  			items[i] = ObjectConsItemTokens{
   524  				Equals: m.tokens.atPos(m.nodeRange(item.KeyExpr).End),
   525  				Comma:  comma,
   526  			}
   527  		}
   528  		nodeTokens = &ObjectConsTokens{
   529  			Parentheses: parens,
   530  			OpenBrace:   m.tokens.atPos(n.OpenRange.Start),
   531  			Items:       items,
   532  			CloseBrace:  m.tokens.atOffset(n.SrcRange.End.Byte - 1),
   533  		}
   534  	case *hclsyntax.RelativeTraversalExpr:
   535  		nodeTokens = &RelativeTraversalTokens{
   536  			Parentheses: parens,
   537  			Traversal:   m.mapRelativeTraversalTokens(n.Traversal),
   538  		}
   539  	case *hclsyntax.ScopeTraversalExpr:
   540  		nodeTokens = &ScopeTraversalTokens{
   541  			Parentheses: parens,
   542  			Root:        m.tokens.atPos(n.Traversal[0].SourceRange().Start),
   543  			Traversal:   m.mapRelativeTraversalTokens(n.Traversal[1:]),
   544  		}
   545  	case *hclsyntax.SplatExpr:
   546  		openToken := m.tokens.atPos(m.nodeRange(n.Source).End)
   547  		starToken := m.tokens.atPos(openToken.Range().End)
   548  		var closeToken *Token
   549  		if openToken.Raw.Type == hclsyntax.TokenOBrack {
   550  			cbrack := m.tokens.atPos(starToken.Range().End)
   551  			closeToken = &cbrack
   552  		}
   553  		nodeTokens = &SplatTokens{
   554  			Parentheses: parens,
   555  			Open:        openToken,
   556  			Star:        starToken,
   557  			Close:       closeToken,
   558  		}
   559  	case *hclsyntax.TemplateExpr:
   560  		// NOTE: the HCL parser lifts control sequences into if or for expressions. This is handled in the corresponding
   561  		// mappers.
   562  		if m.inTemplateControl() {
   563  			nodeTokens = &TemplateTokens{
   564  				Open: Token{
   565  					Raw: hclsyntax.Token{
   566  						Range: hcl.Range{
   567  							Filename: n.SrcRange.Filename,
   568  							Start:    n.SrcRange.Start,
   569  							End:      n.SrcRange.Start,
   570  						},
   571  					},
   572  				},
   573  				Close: Token{
   574  					Raw: hclsyntax.Token{
   575  						Range: hcl.Range{
   576  							Filename: n.SrcRange.Filename,
   577  							Start:    n.SrcRange.End,
   578  							End:      n.SrcRange.End,
   579  						},
   580  					},
   581  				},
   582  			}
   583  		} else {
   584  			// Otherwise (i.e. outside the body of a template if or for), the template has real delimiting tokens; map them directly.
   585  			nodeTokens = &TemplateTokens{
   586  				Parentheses: parens,
   587  				Open:        m.tokens.atPos(n.SrcRange.Start),
   588  				Close:       m.tokens.atOffset(n.SrcRange.End.Byte - 1),
   589  			}
   590  		}
   591  	case *hclsyntax.TemplateWrapExpr:
   592  		nodeTokens = &TemplateTokens{
   593  			Parentheses: parens,
   594  			Open:        m.tokens.atPos(n.SrcRange.Start),
   595  			Close:       m.tokens.atOffset(n.SrcRange.End.Byte - 1),
   596  		}
   597  	case *hclsyntax.TupleConsExpr:
   598  		exprs := n.Exprs
   599  		commas := make([]Token, 0, len(exprs))
   600  		if len(exprs) > 0 {
   601  			for _, ex := range exprs[:len(exprs)-1] {
   602  				commas = append(commas, m.tokens.atPos(m.nodeRange(ex).End))
   603  			}
   604  			if trailing := m.tokens.atPos(m.nodeRange(exprs[len(exprs)-1]).End); trailing.Raw.Type == hclsyntax.TokenComma {
   605  				commas = append(commas, trailing)
   606  			}
   607  		}
   608  		nodeTokens = &TupleConsTokens{
   609  			Parentheses:  parens,
   610  			OpenBracket:  m.tokens.atPos(n.OpenRange.Start),
   611  			Commas:       commas,
   612  			CloseBracket: m.tokens.atOffset(n.SrcRange.End.Byte - 1),
   613  		}
   614  	case *hclsyntax.UnaryOpExpr:
   615  		nodeTokens = &UnaryOpTokens{
   616  			Parentheses: parens,
   617  			Operator:    m.tokens.atPos(n.SymbolRange.Start),
   618  		}
   619  	}
   620  	if nodeTokens != nil {
   621  		m.tokenMap[n] = nodeTokens
   622  	}
   623  
   624  	if n == m.stack[len(m.stack)-1] {
   625  		m.stack = m.stack[:len(m.stack)-1]
   626  	}
   627  
   628  	return nil
   629  }
   630  
   631  // mapTokens builds a mapping from the syntax nodes in the given source file to their tokens. The mapping is recorded
   632  // in the map passed in to the function.
   633  func mapTokens(rawTokens hclsyntax.Tokens, filename string, root hclsyntax.Node, contents []byte, tokenMap tokenMap,
   634  	initialPos hcl.Pos) {
   635  
   636  	// Turn the list of raw tokens into a list of trivia-carrying tokens.
   637  	lastEndPos := initialPos
   638  	var tokens tokenList
   639  	trivia := TriviaList{}
   640  	inControlSeq := false
   641  	for _, raw := range rawTokens {
   642  		// Snip whitespace out of the body and turn it into trivia.
   643  		if startPos := raw.Range.Start; startPos.Byte != lastEndPos.Byte {
   644  			triviaBytes := contents[lastEndPos.Byte-initialPos.Byte : startPos.Byte-initialPos.Byte]
   645  
   646  			// If this trivia begins a new line, attach the current trivia to the last processed token, if any.
   647  			if len(tokens) > 0 {
   648  				if nl := bytes.IndexByte(triviaBytes, '\n'); nl != -1 {
   649  					trailingTriviaBytes := triviaBytes[:nl+1]
   650  					triviaBytes = triviaBytes[nl+1:]
   651  
   652  					endPos := hcl.Pos{Line: lastEndPos.Line + 1, Column: 0, Byte: lastEndPos.Byte + nl + 1}
   653  					rng := hcl.Range{Filename: filename, Start: lastEndPos, End: endPos}
   654  					trivia = append(trivia, Whitespace{rng: rng, bytes: trailingTriviaBytes})
   655  					tokens[len(tokens)-1].TrailingTrivia, trivia = trivia, TriviaList{}
   656  
   657  					lastEndPos = endPos
   658  				}
   659  			}
   660  
   661  			rng := hcl.Range{Filename: filename, Start: lastEndPos, End: startPos}
   662  			trivia = append(trivia, Whitespace{rng: rng, bytes: triviaBytes})
   663  		}
   664  
   665  		switch raw.Type {
   666  		case hclsyntax.TokenComment:
   667  			trivia = append(trivia, Comment{Lines: processComment(raw.Bytes), rng: raw.Range, bytes: raw.Bytes})
   668  		case hclsyntax.TokenTemplateInterp:
   669  			// Treat these as trailing trivia.
   670  			trivia = append(trivia, TemplateDelimiter{Type: raw.Type, rng: raw.Range, bytes: raw.Bytes})
   671  			if len(tokens) > 0 {
   672  				tokens[len(tokens)-1].TrailingTrivia, trivia = trivia, TriviaList{}
   673  			}
   674  		case hclsyntax.TokenTemplateControl:
   675  			tokens, trivia = append(tokens, Token{Raw: raw, LeadingTrivia: trivia}), TriviaList{}
   676  			inControlSeq = true
   677  		case hclsyntax.TokenTemplateSeqEnd:
   678  			// If this terminates a template control sequence, it is a proper token. Otherwise, it is treated as leading
   679  			// trivia.
   680  			if !inControlSeq {
   681  				tokens[len(tokens)-1].TrailingTrivia = trivia
   682  				trivia = TriviaList{TemplateDelimiter{Type: raw.Type, rng: raw.Range, bytes: raw.Bytes}}
   683  			} else {
   684  				tokens, trivia = append(tokens, Token{Raw: raw, LeadingTrivia: trivia}), TriviaList{}
   685  			}
   686  			inControlSeq = false
   687  		case hclsyntax.TokenNewline, hclsyntax.TokenBitwiseAnd, hclsyntax.TokenBitwiseOr,
   688  			hclsyntax.TokenBitwiseNot, hclsyntax.TokenBitwiseXor, hclsyntax.TokenStarStar, hclsyntax.TokenApostrophe,
   689  			hclsyntax.TokenBacktick, hclsyntax.TokenSemicolon, hclsyntax.TokenTabs, hclsyntax.TokenInvalid,
   690  			hclsyntax.TokenBadUTF8, hclsyntax.TokenQuotedNewline:
   691  			// Treat these as whitespace. We cannot omit their bytes from the list entirely, as the algorithm below
   692  			// that maps positions to tokens requires that every byte in the source file is covered by the token list.
   693  			continue
   694  		default:
   695  			tokens, trivia = append(tokens, Token{Raw: raw, LeadingTrivia: trivia}), TriviaList{}
   696  		}
   697  		lastEndPos = raw.Range.End
   698  	}
   699  
   700  	// If we had any tokens, we should have attached all trivia to something.
   701  	contract.Assert(len(trivia) == 0 || len(tokens) == 0)
   702  
   703  	// Now build the token map.
   704  	//
   705  	// If a node records the ranges of relevant tokens in its syntax node, the start position of those ranges is used
   706  	// to fetch the corresponding token.
   707  	//
   708  	// If a node does not record the range of a relevant token, the end position of a preceding element (e.g. an
   709  	// expression or a token) is used to find the token.
   710  	//
   711  	// TODO(pdg): handle parenthesized expressions
   712  	diags := hclsyntax.Walk(root, &tokenMapper{
   713  		tokenMap:             tokenMap,
   714  		tokens:               tokens,
   715  		templateControlExprs: codegen.Set{},
   716  	})
   717  	contract.Assert(diags == nil)
   718  
   719  	// If the root was a Body and there is a trailing end-of-file token, attach it to the body.
   720  	body, isBody := root.(*hclsyntax.Body)
   721  	if isBody && len(tokens) > 0 && tokens[len(tokens)-1].Raw.Type == hclsyntax.TokenEOF {
   722  		tokenMap[body] = &BodyTokens{EndOfFile: &tokens[len(tokens)-1]}
   723  	}
   724  }
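// Illustrative sketch (hypothetical caller and values): the parser is expected to invoke mapTokens once per
// parsed file, along the lines of
//
//	tokens := tokenMap{}
//	mapTokens(rawTokens, "main.tf", body, contents, tokens, hcl.Pos{Line: 1, Column: 1, Byte: 0})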
   725  
   726  // processComment separates the given comment into lines and attempts to remove comment tokens.
   727  func processComment(bytes []byte) []string {
   728  	comment := string(bytes)
   729  
   730  	// Each HCL comment may be either a line comment or a block comment. Line comments start with '#' or '//' and
   731  	// terminate with an EOL. Block comments begin with a '/*' and terminate with a '*/'. All comment delimiters are
   732  	// preserved in the HCL comment text.
   733  	//
   734  	// To make life easier for the code generators, HCL comments are pre-processed to remove comment delimiters. For
   735  	// line comments, this process is trivial. For block comments, things are a bit more involved.
   736  	switch {
   737  	case comment[0] == '#':
   738  		return []string{strings.TrimSuffix(comment[1:], "\n")}
   739  	case comment[0:2] == "//":
   740  		return []string{strings.TrimSuffix(comment[2:], "\n")}
   741  	default:
   742  		return processBlockComment(comment)
   743  	}
   744  }
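// Illustrative examples (hypothetical comment tokens; HCL preserves the delimiters in the token bytes):
//
//	processComment([]byte("# a line comment\n"))   // []string{" a line comment"}
//	processComment([]byte("// another comment\n")) // []string{" another comment"}
//	processComment([]byte("/* a block */"))        // delegated to processBlockComment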
   745  
   746  // These regexes are used by processBlockComment. The first matches a block comment start, the second a block comment
   747  // end, and the third a block comment line prefix.
   748  var blockStartPat = regexp.MustCompile(`^/\*+`)
   749  var blockEndPat = regexp.MustCompile(`[[:space:]]*\*+/$`)
   750  var blockPrefixPat = regexp.MustCompile(`^[[:space:]]*\*`)
   751  
   752  // processBlockComment splits a block comment into multiple lines, removes comment delimiters, and attempts to remove
   753  // common comment prefixes from interior lines. For example, the following HCL block comment:
   754  //
   755  //     /**
   756  //      * This is a block comment!
   757  //      *
   758  //      * It has multiple lines.
   759  //      */
   760  //
   761  // becomes this set of lines:
   762  //
   763  //     []string{" This is a block comment!", "", " It has multiple lines."}
   764  //
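// As a further illustration, a single-line block comment such as
//
//     /* one-line block */
//
// becomes []string{" one-line block"}: the leading '/*' and trailing ' */' are trimmed and no interior
// prefix is removed.
//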
   765  func processBlockComment(text string) []string {
   766  	lines := strings.Split(text, "\n")
   767  
   768  	// We will always trim the following:
   769  	// - '^/\*+' from the first line
   770  	// - a trailing '[[:space:]]\*+/$' from the last line
   771  
   772  	// In addition, we will trim '^[[:space:]]*\*' from the second through last lines iff all lines in that set share
   773  	// a prefix that matches that pattern.
   774  
   775  	prefix := ""
   776  	for i, l := range lines[1:] {
   777  		m := blockPrefixPat.FindString(l)
   778  		if i == 0 {
   779  			prefix = m
   780  		} else if m != prefix {
   781  			prefix = ""
   782  			break
   783  		}
   784  	}
   785  
   786  	for i, l := range lines {
   787  		switch i {
   788  		case 0:
   789  			start := blockStartPat.FindString(l)
   790  			contract.Assert(start != "")
   791  			l = l[len(start):]
   792  
   793  			// If this is a single-line block comment, trim the end pattern as well.
   794  			if len(lines) == 1 {
   795  				contract.Assert(prefix == "")
   796  
   797  				if end := blockEndPat.FindString(l); end != "" {
   798  					l = l[:len(l)-len(end)]
   799  				}
   800  			}
   801  		case len(lines) - 1:
   802  			// The prefix we're trimming may overlap with the end pattern we're trimming. In this case, trim the entire
   803  			// line.
   804  			if len(l)-len(prefix) == 1 {
   805  				l = ""
   806  			} else {
   807  				l = l[len(prefix):]
   808  				if end := blockEndPat.FindString(l); end != "" {
   809  					l = l[:len(l)-len(end)]
   810  				}
   811  			}
   812  		default:
   813  			// Trim the prefix.
   814  			l = l[len(prefix):]
   815  		}
   816  
   817  		lines[i] = l
   818  	}
   819  
   820  	// If the first or last line is empty, drop it.
   821  	if lines[0] == "" {
   822  		lines = lines[1:]
   823  	}
   824  	if len(lines) > 0 && lines[len(lines)-1] == "" {
   825  		lines = lines[:len(lines)-1]
   826  	}
   827  
   828  	return lines
   829  }