github.com/hashicorp/hcl/v2@v2.20.0/hclsyntax/public.go

// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0

package hclsyntax

import (
	"github.com/hashicorp/hcl/v2"
)

// ParseConfig parses the given buffer as a whole HCL config file, returning
// a *hcl.File representing its contents. If HasErrors called on the returned
// diagnostics returns true, the returned body is likely to be incomplete
// and should therefore be used with care.
//
// The body in the returned file has dynamic type *hclsyntax.Body, so callers
// may freely type-assert this to get access to the full hclsyntax API in
// situations where detailed access is required. However, most common use-cases
// should be served using the hcl.Body interface to ensure compatibility with
// other configuration syntaxes, such as JSON.
func ParseConfig(src []byte, filename string, start hcl.Pos) (*hcl.File, hcl.Diagnostics) {
	tokens, diags := LexConfig(src, filename, start)
	peeker := newPeeker(tokens, false)
	parser := &parser{peeker: peeker}
	body, parseDiags := parser.ParseBody(TokenEOF)
	diags = append(diags, parseDiags...)

	// Panic if the parser uses incorrect stack discipline with the peeker's
	// newlines stack, since otherwise it will produce confusing downstream
	// errors.
	peeker.AssertEmptyIncludeNewlinesStack()

	return &hcl.File{
		Body:  body,
		Bytes: src,

		Nav: navigation{
			root: body,
		},
	}, diags
}

// ParseExpression parses the given buffer as a standalone HCL expression,
// returning it as an instance of Expression.
func ParseExpression(src []byte, filename string, start hcl.Pos) (Expression, hcl.Diagnostics) {
	tokens, diags := LexExpression(src, filename, start)
	peeker := newPeeker(tokens, false)
	parser := &parser{peeker: peeker}

	// Bare expressions are always parsed in "ignore newlines" mode, as if
	// they were wrapped in parentheses.
	parser.PushIncludeNewlines(false)

	expr, parseDiags := parser.ParseExpression()
	diags = append(diags, parseDiags...)

	next := parser.Peek()
	if next.Type != TokenEOF && !parser.recovery {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Extra characters after expression",
			Detail:   "An expression was successfully parsed, but extra characters were found after it.",
			Subject:  &next.Range,
		})
	}

	parser.PopIncludeNewlines()

	// Panic if the parser uses incorrect stack discipline with the peeker's
	// newlines stack, since otherwise it will produce confusing downstream
	// errors.
	peeker.AssertEmptyIncludeNewlinesStack()

	return expr, diags
}

// ParseTemplate parses the given buffer as a standalone HCL template,
// returning it as an instance of Expression.
func ParseTemplate(src []byte, filename string, start hcl.Pos) (Expression, hcl.Diagnostics) {
	tokens, diags := LexTemplate(src, filename, start)
	peeker := newPeeker(tokens, false)
	parser := &parser{peeker: peeker}
	expr, parseDiags := parser.ParseTemplate()
	diags = append(diags, parseDiags...)

	// Panic if the parser uses incorrect stack discipline with the peeker's
	// newlines stack, since otherwise it will produce confusing downstream
	// errors.
	peeker.AssertEmptyIncludeNewlinesStack()

	return expr, diags
}
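// The function below is an illustrative sketch, not part of the upstream file:
// it shows how a caller might use ParseConfig and then type-assert the
// returned body to *Body for detailed access, as the ParseConfig doc comment
// above describes. The source text and variable names are hypothetical.
func exampleParseConfigUsage() {
	src := []byte(`
service "web" {
  port = 8080
}
`)
	f, diags := ParseConfig(src, "example.hcl", hcl.Pos{Line: 1, Column: 1, Byte: 0})
	if diags.HasErrors() {
		// The returned body may be incomplete, so handle diagnostics before
		// relying on it.
		return
	}
	if body, ok := f.Body.(*Body); ok {
		_ = body // the full hclsyntax API is available via the concrete type
	}
}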
// ParseTraversalAbs parses the given buffer as a standalone absolute traversal.
//
// Parsing as a traversal is more limited than parsing as an expression since
// it allows only attribute and indexing operations on variables. Traversals
// are useful as a syntax for referring to objects without necessarily
// evaluating them.
func ParseTraversalAbs(src []byte, filename string, start hcl.Pos) (hcl.Traversal, hcl.Diagnostics) {
	tokens, diags := LexExpression(src, filename, start)
	peeker := newPeeker(tokens, false)
	parser := &parser{peeker: peeker}

	// Bare traversals are always parsed in "ignore newlines" mode, as if
	// they were wrapped in parentheses.
	parser.PushIncludeNewlines(false)

	expr, parseDiags := parser.ParseTraversalAbs()
	diags = append(diags, parseDiags...)

	parser.PopIncludeNewlines()

	// Panic if the parser uses incorrect stack discipline with the peeker's
	// newlines stack, since otherwise it will produce confusing downstream
	// errors.
	peeker.AssertEmptyIncludeNewlinesStack()

	return expr, diags
}

// LexConfig performs lexical analysis on the given buffer, treating it as a
// whole HCL config file, and returns the resulting tokens.
//
// Only minimal validation is done during lexical analysis, so the returned
// diagnostics may include errors about lexical issues such as bad character
// encodings or unrecognized characters, but full parsing is required to
// detect _all_ syntax errors.
func LexConfig(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) {
	tokens := scanTokens(src, filename, start, scanNormal)
	diags := checkInvalidTokens(tokens)
	return tokens, diags
}

// LexExpression performs lexical analysis on the given buffer, treating it as
// a standalone HCL expression, and returns the resulting tokens.
//
// Only minimal validation is done during lexical analysis, so the returned
// diagnostics may include errors about lexical issues such as bad character
// encodings or unrecognized characters, but full parsing is required to
// detect _all_ syntax errors.
func LexExpression(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) {
	// This is actually just the same thing as LexConfig, since configs
	// and expressions lex in the same way.
	tokens := scanTokens(src, filename, start, scanNormal)
	diags := checkInvalidTokens(tokens)
	return tokens, diags
}

// LexTemplate performs lexical analysis on the given buffer, treating it as a
// standalone HCL template, and returns the resulting tokens.
//
// Only minimal validation is done during lexical analysis, so the returned
// diagnostics may include errors about lexical issues such as bad character
// encodings or unrecognized characters, but full parsing is required to
// detect _all_ syntax errors.
func LexTemplate(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) {
	tokens := scanTokens(src, filename, start, scanTemplate)
	diags := checkInvalidTokens(tokens)
	return tokens, diags
}
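// The function below is an illustrative sketch, not part of the upstream file:
// it shows how ParseTraversalAbs (defined above) can turn a reference such as
// "aws_instance.web.id" into an hcl.Traversal without evaluating it, which is
// the use-case its doc comment describes. The reference string is a
// hypothetical example.
func exampleParseTraversalUsage() {
	traversal, diags := ParseTraversalAbs(
		[]byte("aws_instance.web.id"),
		"example.hcl",
		hcl.Pos{Line: 1, Column: 1, Byte: 0},
	)
	if diags.HasErrors() {
		return
	}
	// RootName returns the first step of the traversal, "aws_instance" here;
	// the traversal can later be resolved with TraverseAbs against an
	// hcl.EvalContext.
	_ = traversal.RootName()
}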
// ValidIdentifier tests if the given string could be a valid identifier in
// a native syntax expression.
//
// This is useful when accepting names from the user that will be used as
// variable or attribute names in the scope, to ensure that any name chosen
// will be traversable using the variable or attribute traversal syntax.
func ValidIdentifier(s string) bool {
	// This is a kinda-expensive way to do something pretty simple, but it
	// is easiest to do with our existing scanner-related infrastructure here
	// and nobody should be validating identifiers in a tight loop.
	tokens := scanTokens([]byte(s), "", hcl.Pos{}, scanIdentOnly)
	return len(tokens) == 2 && tokens[0].Type == TokenIdent && tokens[1].Type == TokenEOF
}
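// The function below is an illustrative sketch, not part of the upstream file:
// it shows the intended use of ValidIdentifier for vetting user-supplied names
// before they are introduced into a scope. The names checked are hypothetical.
func exampleValidIdentifierUsage() {
	_ = ValidIdentifier("instance_count") // true: lexes as a single identifier token
	_ = ValidIdentifier("not valid")      // false: lexes as more than one token
	_ = ValidIdentifier("")               // false: no identifier token at all
}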