github.com/jd-ly/tools@v0.5.7/internal/lsp/semantic.go (about)

     1  // Copyright 2020 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package lsp
     6  
     7  import (
     8  	"bytes"
     9  	"context"
    10  	"fmt"
    11  	"go/ast"
    12  	"go/token"
    13  	"go/types"
    14  	"log"
    15  	"sort"
    16  	"strings"
    17  	"time"
    18  
    19  	"github.com/jd-ly/tools/internal/event"
    20  	"github.com/jd-ly/tools/internal/lsp/protocol"
    21  	"github.com/jd-ly/tools/internal/lsp/source"
    22  	errors "golang.org/x/xerrors"
    23  )
    24  
    25  func (s *Server) semanticTokensFull(ctx context.Context, p *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
    26  	ret, err := s.computeSemanticTokens(ctx, p.TextDocument, nil)
    27  	return ret, err
    28  }
    29  
// semanticTokensFullDelta is not yet implemented; the client always
// receives an error for delta requests.
func (s *Server) semanticTokensFullDelta(ctx context.Context, p *protocol.SemanticTokensDeltaParams) (interface{}, error) {
	return nil, errors.Errorf("implement SemanticTokensFullDelta")
}
    33  
    34  func (s *Server) semanticTokensRange(ctx context.Context, p *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) {
    35  	ret, err := s.computeSemanticTokens(ctx, p.TextDocument, &p.Range)
    36  	return ret, err
    37  }
    38  
// semanticTokensRefresh is not yet implemented. The method exists in
// the generated protocol code but, at the time of writing, not in the
// protocol specification.
func (s *Server) semanticTokensRefresh(ctx context.Context) error {
	// in the code, but not in the protocol spec
	return errors.Errorf("implement SemanticTokensRefresh")
}
    43  
// computeSemanticTokens produces the semantic tokens for the file named
// by td. If rng is non-nil, only tokens overlapping that range are
// returned; otherwise the whole file is processed.
func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocumentIdentifier, rng *protocol.Range) (*protocol.SemanticTokens, error) {
	ans := protocol.SemanticTokens{
		Data: []float64{},
	}
	snapshot, _, ok, release, err := s.beginFileRequest(ctx, td.URI, source.Go)
	defer release()
	if !ok {
		return nil, err
	}
	vv := snapshot.View()
	if !vv.Options().SemanticTokens {
		// return an error, so if the option changes
		// the client won't remember the wrong answer
		return nil, errors.Errorf("semantictokens are disabled")
	}
	pkg, err := snapshot.PackageForFile(ctx, td.URI.SpanURI(), source.TypecheckFull, source.WidestPackage)
	if err != nil {
		return nil, err
	}
	info := pkg.GetTypesInfo()
	pgf, err := pkg.File(td.URI.SpanURI())
	if err != nil {
		return nil, err
	}
	// refuse to produce tokens for a file that failed to parse:
	// positions in a broken tree would be unreliable
	if pgf.ParseErr != nil {
		return nil, pgf.ParseErr
	}
	// e collects tokens while walking the parse tree, then encodes them
	e := &encoded{
		ctx:      ctx,
		pgf:      pgf,
		rng:      rng,
		ti:       info,
		fset:     snapshot.FileSet(),
		tokTypes: s.session.Options().SemanticTypes,
		tokMods:  s.session.Options().SemanticMods,
	}
	if err := e.init(); err != nil {
		return nil, err
	}
	e.semantics()
	ans.Data, err = e.Data()
	if err != nil {
		// this is an internal error, likely caused by a typo
		// for a token or modifier
		return nil, err
	}
	// for small cache, some day. for now, the client ignores this
	ans.ResultID = fmt.Sprintf("%v", time.Now())
	return &ans, nil
}
    94  
    95  func (e *encoded) semantics() {
    96  	f := e.pgf.File
    97  	e.token(f.Package, len("package"), tokKeyword, nil)
    98  	e.token(f.Name.NamePos, len(f.Name.Name), tokNamespace, nil)
    99  	inspect := func(n ast.Node) bool {
   100  		return e.inspector(n)
   101  	}
   102  	for _, d := range f.Decls {
   103  		// only look at the decls that overlap the range
   104  		start, end := d.Pos(), d.End()
   105  		if end <= e.start || start >= e.end {
   106  			continue
   107  		}
   108  		ast.Inspect(d, inspect)
   109  	}
   110  }
   111  
// tokenType is a semantic token type name, as defined by the LSP
// semantic-token proposal.
type tokenType string

// The token types this encoder produces. The strings must match the
// names the client was told about in the server capabilities.
const (
	tokNamespace tokenType = "namespace"
	tokType      tokenType = "type"
	tokInterface tokenType = "interface"
	tokParameter tokenType = "parameter"
	tokVariable  tokenType = "variable"
	tokMember    tokenType = "member"
	tokFunction  tokenType = "function"
	tokKeyword   tokenType = "keyword"
	tokComment   tokenType = "comment"
	tokString    tokenType = "string"
	tokNumber    tokenType = "number"
	tokOperator  tokenType = "operator"
)
   128  
// token records a semantic token of the given type and modifiers that
// starts at start and is leng bytes long. Tokens outside the requested
// range or spanning more than one line are silently dropped; a token
// at token.NoPos indicates a bug and panics via unexpected.
func (e *encoded) token(start token.Pos, leng int, typ tokenType, mods []string) {
	if start == 0 {
		e.unexpected("token at token.NoPos")
	}
	// ignore tokens entirely outside [e.start, e.end)
	if start >= e.end || start+token.Pos(leng) <= e.start {
		return
	}
	// want a line and column from start (in LSP coordinates)
	// [//line directives should be ignored]
	rng := source.NewMappedRange(e.fset, e.pgf.Mapper, start, start+token.Pos(leng))
	lspRange, err := rng.Range()
	if err != nil {
		// possibly a //line directive. TODO(pjw): fix this somehow
		// "column mapper is for file...instead of..."
		// "line is beyond end of file..."
		// see line 116 of internal/span/token.go which uses Position not PositionFor
		event.Error(e.ctx, "failed to convert to range", err)
		return
	}
	if lspRange.End.Line != lspRange.Start.Line {
		// abrupt end of file, without \n. TODO(pjw): fix?
		pos := e.fset.PositionFor(start, false)
		msg := fmt.Sprintf("token at %s:%d.%d overflows", pos.Filename, pos.Line, pos.Column)
		event.Log(e.ctx, msg)
		return
	}
	// token is all on one line
	length := lspRange.End.Character - lspRange.Start.Character
	e.add(lspRange.Start.Line, lspRange.Start.Character, length, typ, mods)
}
   159  
   160  func (e *encoded) add(line, start float64, len float64, tok tokenType, mod []string) {
   161  	x := semItem{line, start, len, tok, mod}
   162  	e.items = append(e.items, x)
   163  }
   164  
// semItem represents a token found walking the parse tree
type semItem struct {
	line, start float64 // 0-based LSP line and start character
	len         float64 // length in characters; the token fits on one line
	typeStr     tokenType
	mods        []string // modifier names, e.g. "definition", "readonly"
}
   172  
// encoded accumulates semantic tokens for one file and encodes them in
// the LSP wire format (see Data).
type encoded struct {
	// the generated data
	items []semItem

	ctx               context.Context
	tokTypes, tokMods []string // legal type/modifier names, from the session options
	pgf               *source.ParsedGoFile
	rng               *protocol.Range // requested range; nil means the whole file
	ti                *types.Info
	fset              *token.FileSet
	// allowed starting and ending token.Pos, set by init
	// used to avoid looking at declarations not in range
	start, end token.Pos
	// path from the root of the parse tree, used for debugging
	stack []ast.Node
}
   189  
   190  // convert the stack to a string, for debugging
   191  func (e *encoded) strStack() string {
   192  	msg := []string{"["}
   193  	for _, s := range e.stack {
   194  		msg = append(msg, fmt.Sprintf("%T", s)[5:])
   195  	}
   196  	if len(e.stack) > 0 {
   197  		loc := e.stack[len(e.stack)-1].Pos()
   198  		add := e.pgf.Tok.PositionFor(loc, false)
   199  		msg = append(msg, fmt.Sprintf("(line:%d,col:%d)", add.Line, add.Column))
   200  	}
   201  	msg = append(msg, "]")
   202  	return strings.Join(msg, " ")
   203  }
   204  
// inspector is the callback for ast.Inspect. It maintains e.stack (the
// path from the root of the parse tree to the current node) and emits
// tokens for the fixed syntax of each node kind: keywords, operators,
// and literals. Identifiers are delegated to e.ident, import specs to
// e.importSpec. Returning false prunes the walk below n.
func (e *encoded) inspector(n ast.Node) bool {
	pop := func() {
		e.stack = e.stack[:len(e.stack)-1]
	}
	// ast.Inspect calls with n == nil after visiting a node's children;
	// that is when the stack entry is popped
	if n == nil {
		pop()
		return true
	}
	e.stack = append(e.stack, n)
	// empty cases mean the node contributes no tokens itself, but its
	// children are still visited
	switch x := n.(type) {
	case *ast.ArrayType:
	case *ast.AssignStmt:
		e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil)
	case *ast.BasicLit:
		// if it extends across a line, skip it
		// better would be to mark each line as string TODO(pjw)
		if strings.Contains(x.Value, "\n") {
			break
		}
		ln := len(x.Value)
		what := tokNumber
		if x.Kind == token.STRING {
			what = tokString
			if _, ok := e.stack[len(e.stack)-2].(*ast.Field); ok {
				// struct tags (this is probably pointless, as the
				// TextMate grammar will treat all the other comments the same)
				what = tokComment
			}
		}
		e.token(x.Pos(), ln, what, nil)
	case *ast.BinaryExpr:
		e.token(x.OpPos, len(x.Op.String()), tokOperator, nil)
	case *ast.BlockStmt:
	case *ast.BranchStmt:
		e.token(x.TokPos, len(x.Tok.String()), tokKeyword, nil)
		// There's no semantic encoding for labels
	case *ast.CallExpr:
		if x.Ellipsis != token.NoPos {
			e.token(x.Ellipsis, len("..."), tokOperator, nil)
		}
	case *ast.CaseClause:
		// a nil List marks the default clause
		iam := "case"
		if x.List == nil {
			iam = "default"
		}
		e.token(x.Case, len(iam), tokKeyword, nil)
	case *ast.ChanType:
		// chan | chan <- | <- chan
		if x.Arrow == token.NoPos || x.Arrow != x.Begin {
			e.token(x.Begin, len("chan"), tokKeyword, nil)
			break
		}
		// "<- chan": the keyword follows the arrow; search for it
		pos := e.findKeyword("chan", x.Begin+2, x.Value.Pos())
		e.token(pos, len("chan"), tokKeyword, nil)
	case *ast.CommClause:
		// a nil Comm marks the default clause of a select
		iam := len("case")
		if x.Comm == nil {
			iam = len("default")
		}
		e.token(x.Case, iam, tokKeyword, nil)
	case *ast.CompositeLit:
	case *ast.DeclStmt:
	case *ast.DeferStmt:
		e.token(x.Defer, len("defer"), tokKeyword, nil)
	case *ast.Ellipsis:
		e.token(x.Ellipsis, len("..."), tokOperator, nil)
	case *ast.EmptyStmt:
	case *ast.ExprStmt:
	case *ast.Field:
	case *ast.FieldList:
	case *ast.ForStmt:
		e.token(x.For, len("for"), tokKeyword, nil)
	case *ast.FuncDecl:
	case *ast.FuncLit:
	case *ast.FuncType:
		// Func is NoPos for the type in a method's receiver/parameters
		if x.Func != token.NoPos {
			e.token(x.Func, len("func"), tokKeyword, nil)
		}
	case *ast.GenDecl:
		e.token(x.TokPos, len(x.Tok.String()), tokKeyword, nil)
	case *ast.GoStmt:
		e.token(x.Go, len("go"), tokKeyword, nil)
	case *ast.Ident:
		e.ident(x)
	case *ast.IfStmt:
		e.token(x.If, len("if"), tokKeyword, nil)
		if x.Else != nil {
			// x.Body.End() or x.Body.End()+1, not that it matters
			pos := e.findKeyword("else", x.Body.End(), x.Else.Pos())
			e.token(pos, len("else"), tokKeyword, nil)
		}
	case *ast.ImportSpec:
		e.importSpec(x)
		pop()
		return false
	case *ast.IncDecStmt:
		e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil)
	case *ast.IndexExpr:
	case *ast.InterfaceType:
		e.token(x.Interface, len("interface"), tokKeyword, nil)
	case *ast.KeyValueExpr:
	case *ast.LabeledStmt:
	case *ast.MapType:
		e.token(x.Map, len("map"), tokKeyword, nil)
	case *ast.ParenExpr:
	case *ast.RangeStmt:
		e.token(x.For, len("for"), tokKeyword, nil)
		// x.TokPos == token.NoPos is legal (for range foo {})
		offset := x.TokPos
		if offset == token.NoPos {
			offset = x.For
		}
		pos := e.findKeyword("range", offset, x.X.Pos())
		e.token(pos, len("range"), tokKeyword, nil)
	case *ast.ReturnStmt:
		e.token(x.Return, len("return"), tokKeyword, nil)
	case *ast.SelectStmt:
		e.token(x.Select, len("select"), tokKeyword, nil)
	case *ast.SelectorExpr:
	case *ast.SendStmt:
		e.token(x.Arrow, len("<-"), tokOperator, nil)
	case *ast.SliceExpr:
	case *ast.StarExpr:
		e.token(x.Star, len("*"), tokOperator, nil)
	case *ast.StructType:
		e.token(x.Struct, len("struct"), tokKeyword, nil)
	case *ast.SwitchStmt:
		e.token(x.Switch, len("switch"), tokKeyword, nil)
	case *ast.TypeAssertExpr:
		// x.(type) in a type switch has a nil Type
		if x.Type == nil {
			pos := e.findKeyword("type", x.Lparen, x.Rparen)
			e.token(pos, len("type"), tokKeyword, nil)
		}
	case *ast.TypeSpec:
	case *ast.TypeSwitchStmt:
		e.token(x.Switch, len("switch"), tokKeyword, nil)
	case *ast.UnaryExpr:
		e.token(x.OpPos, len(x.Op.String()), tokOperator, nil)
	case *ast.ValueSpec:
	// things we won't see
	case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt,
		*ast.File, *ast.Package:
		log.Printf("implement %T %s", x, e.pgf.Tok.PositionFor(x.Pos(), false))
	// things we knowingly ignore
	case *ast.Comment, *ast.CommentGroup:
		pop()
		return false
	default: // just to be super safe.
		e.unexpected(fmt.Sprintf("failed to implement %T", x))
	}
	return true
}
// ident emits a token for the identifier x, choosing the token type and
// modifiers from the type checker's Defs and Uses maps. Unexpected
// object kinds panic via unexpected (deliberately, while the feature is
// being shaken out).
func (e *encoded) ident(x *ast.Ident) {
	def := e.ti.Defs[x]
	if def != nil {
		// defining occurrence: its role comes from the parse stack
		what, mods := e.definitionFor(x)
		if what != "" {
			e.token(x.Pos(), len(x.String()), what, mods)
		}
		return
	}
	use := e.ti.Uses[x]
	switch y := use.(type) {
	case nil:
		// neither a definition nor a use in the type info
		e.token(x.NamePos, len(x.Name), tokVariable, []string{"definition"})
	case *types.Builtin:
		e.token(x.NamePos, len(x.Name), tokFunction, []string{"defaultLibrary"})
	case *types.Const:
		mods := []string{"readonly"}
		tt := y.Type()
		if _, ok := tt.(*types.Basic); ok {
			e.token(x.Pos(), len(x.String()), tokVariable, mods)
			break
		}
		if ttx, ok := tt.(*types.Named); ok {
			if x.String() == "iota" {
				e.unexpected(fmt.Sprintf("iota:%T", ttx))
			}
			if _, ok := ttx.Underlying().(*types.Basic); ok {
				e.token(x.Pos(), len(x.String()), tokVariable, mods)
				break
			}
			e.unexpected(fmt.Sprintf("%q/%T", x.String(), tt))
		}
		// can this happen? Don't think so
		e.unexpected(fmt.Sprintf("%s %T %#v", x.String(), tt, tt))
	case *types.Func:
		e.token(x.Pos(), len(x.Name), tokFunction, nil)
	case *types.Label:
		// nothing to map it to
	case *types.Nil:
		// nil is a predeclared identifier
		e.token(x.Pos(), len("nil"), tokVariable, []string{"readonly"})
	case *types.PkgName:
		e.token(x.Pos(), len(x.Name), tokNamespace, nil)
	case *types.TypeName:
		e.token(x.Pos(), len(x.String()), tokType, nil)
	case *types.Var:
		e.token(x.Pos(), len(x.Name), tokVariable, nil)
	default:
		// replace with panic after extensive testing
		if use == nil {
			msg := fmt.Sprintf("%#v/%#v %#v %#v", x, x.Obj, e.ti.Defs[x], e.ti.Uses[x])
			e.unexpected(msg)
		}
		if use.Type() != nil {
			e.unexpected(fmt.Sprintf("%s %T/%T,%#v", x.String(), use, use.Type(), use))
		} else {
			e.unexpected(fmt.Sprintf("%s %T", x.String(), use))
		}
	}
}
   417  
   418  func (e *encoded) definitionFor(x *ast.Ident) (tokenType, []string) {
   419  	mods := []string{"definition"}
   420  	for i := len(e.stack) - 1; i >= 0; i-- {
   421  		s := e.stack[i]
   422  		switch y := s.(type) {
   423  		case *ast.AssignStmt, *ast.RangeStmt:
   424  			if x.Name == "_" {
   425  				return "", nil // not really a variable
   426  			}
   427  			return "variable", mods
   428  		case *ast.GenDecl:
   429  			if y.Tok == token.CONST {
   430  				mods = append(mods, "readonly")
   431  			}
   432  			return tokVariable, mods
   433  		case *ast.FuncDecl:
   434  			// If x is immediately under a FuncDecl, it is a function or method
   435  			if i == len(e.stack)-2 {
   436  				if y.Recv != nil {
   437  					return tokMember, mods
   438  				}
   439  				return tokFunction, mods
   440  			}
   441  			// if x < ... < FieldList < FuncDecl, this is the receiver, a variable
   442  			if _, ok := e.stack[i+1].(*ast.FieldList); ok {
   443  				return tokVariable, nil
   444  			}
   445  			// if x < ... < FieldList < FuncType < FuncDecl, this is a param
   446  			return tokParameter, mods
   447  		case *ast.InterfaceType:
   448  			return tokMember, mods
   449  		case *ast.TypeSpec:
   450  			return tokType, mods
   451  		}
   452  	}
   453  	// panic after extensive testing
   454  	msg := fmt.Sprintf("failed to find the decl for %s", e.pgf.Tok.PositionFor(x.Pos(), false))
   455  	e.unexpected(msg)
   456  	return "", []string{""}
   457  }
   458  
   459  // findKeyword finds a keyword rather than guessing its location
   460  func (e *encoded) findKeyword(keyword string, start, end token.Pos) token.Pos {
   461  	offset := int(start) - e.pgf.Tok.Base()
   462  	last := int(end) - e.pgf.Tok.Base()
   463  	buf := e.pgf.Src
   464  	idx := bytes.Index(buf[offset:last], []byte(keyword))
   465  	if idx != -1 {
   466  		return start + token.Pos(idx)
   467  	}
   468  	// can't happen
   469  	e.unexpected(fmt.Sprintf("not found:%s %v", keyword, e.fset.PositionFor(start, false)))
   470  	return token.NoPos
   471  }
   472  
// init computes the allowed token.Pos window [e.start, e.end) from the
// requested protocol range, defaulting to the whole file when e.rng is
// nil.
func (e *encoded) init() error {
	e.start = token.Pos(e.pgf.Tok.Base())
	e.end = e.start + token.Pos(e.pgf.Tok.Size())
	if e.rng == nil {
		return nil
	}
	span, err := e.pgf.Mapper.RangeSpan(*e.rng)
	if err != nil {
		return errors.Errorf("range span error for %s", e.pgf.File.Name)
	}
	// NOTE: e.end must be set before e.start is advanced below, because
	// both offsets are relative to the file's base position
	e.end = e.start + token.Pos(span.End().Offset())
	e.start += token.Pos(span.Start().Offset())
	return nil
}
   487  
   488  func (e *encoded) Data() ([]float64, error) {
   489  	// binary operators, at least, will be out of order
   490  	sort.Slice(e.items, func(i, j int) bool {
   491  		if e.items[i].line != e.items[j].line {
   492  			return e.items[i].line < e.items[j].line
   493  		}
   494  		return e.items[i].start < e.items[j].start
   495  	})
   496  	typeMap, modMap := e.maps()
   497  	// each semantic token needs five values
   498  	// (see Integer Encoding for Tokens in the LSP spec)
   499  	x := make([]float64, 5*len(e.items))
   500  	for i := 0; i < len(e.items); i++ {
   501  		j := 5 * i
   502  		if i == 0 {
   503  			x[0] = e.items[0].line
   504  		} else {
   505  			x[j] = e.items[i].line - e.items[i-1].line
   506  		}
   507  		x[j+1] = e.items[i].start
   508  		if i > 0 && e.items[i].line == e.items[i-1].line {
   509  			x[j+1] = e.items[i].start - e.items[i-1].start
   510  		}
   511  		x[j+2] = e.items[i].len
   512  		x[j+3] = float64(typeMap[e.items[i].typeStr])
   513  		mask := 0
   514  		for _, s := range e.items[i].mods {
   515  			mask |= modMap[s]
   516  		}
   517  		x[j+4] = float64(mask)
   518  	}
   519  	return x, nil
   520  }
   521  
   522  func (e *encoded) importSpec(d *ast.ImportSpec) {
   523  	// a local package name or the last component of the Path
   524  	if d.Name != nil {
   525  		nm := d.Name.String()
   526  		// import . x => x is not a namespace
   527  		// import _ x => x is a namespace
   528  		if nm != "_" && nm != "." {
   529  			e.token(d.Name.Pos(), len(nm), tokNamespace, nil)
   530  			return
   531  		}
   532  		if nm == "." {
   533  			return
   534  		}
   535  		// and fall through for _
   536  	}
   537  	nm := d.Path.Value[1 : len(d.Path.Value)-1] // trailing "
   538  	v := strings.LastIndex(nm, "/")
   539  	if v != -1 {
   540  		nm = nm[v+1:]
   541  	}
   542  	start := d.Path.End() - token.Pos(1+len(nm))
   543  	e.token(start, len(nm), tokNamespace, nil)
   544  }
   545  
// unexpected logs msg together with the current traversal stack, then
// panics. It marks states believed to be impossible; the stack dump
// identifies the node being visited when the invariant failed.
func (e *encoded) unexpected(msg string) {
	log.Print(msg)
	log.Print(e.strStack())
	panic(msg)
}
   552  
   553  // SemType returns a string equivalent of the type, for gopls semtok
   554  func SemType(n int) string {
   555  	tokTypes := SemanticTypes()
   556  	tokMods := SemanticModifiers()
   557  	if n >= 0 && n < len(tokTypes) {
   558  		return tokTypes[n]
   559  	}
   560  	return fmt.Sprintf("?%d[%d,%d]?", n, len(tokTypes), len(tokMods))
   561  }
   562  
   563  // SemMods returns the []string equivalent of the mods, for gopls semtok.
   564  func SemMods(n int) []string {
   565  	tokMods := SemanticModifiers()
   566  	mods := []string{}
   567  	for i := 0; i < len(tokMods); i++ {
   568  		if (n & (1 << uint(i))) != 0 {
   569  			mods = append(mods, tokMods[i])
   570  		}
   571  	}
   572  	return mods
   573  }
   574  
   575  func (e *encoded) maps() (map[tokenType]int, map[string]int) {
   576  	tmap := make(map[tokenType]int)
   577  	mmap := make(map[string]int)
   578  	for i, t := range e.tokTypes {
   579  		tmap[tokenType(t)] = i
   580  	}
   581  	for i, m := range e.tokMods {
   582  		mmap[m] = 1 << uint(i) // go 1.12 compatibility
   583  	}
   584  	return tmap, mmap
   585  }
   586  
// SemanticTypes to use in case there is no client, as in the command line, or tests.
// The slice shares the package-level array; callers should not modify it.
func SemanticTypes() []string {
	return semanticTypes[:]
}
   591  
// SemanticModifiers to use in case there is no client.
// The slice shares the package-level array; callers should not modify it.
func SemanticModifiers() []string {
	return semanticModifiers[:]
}
   596  
var (
	// semanticTypes is the full list of token type names advertised to
	// clients; indexes into it are the wire encoding of a token's type.
	semanticTypes = [...]string{
		"namespace", "type", "class", "enum", "interface",
		"struct", "typeParameter", "parameter", "variable", "property", "enumMember",
		"event", "function", "member", "macro", "keyword", "modifier", "comment",
		"string", "number", "regexp", "operator"}
	// semanticModifiers is the full list of modifier names; a modifier's
	// index is its bit position in the encoded modifier mask.
	semanticModifiers = [...]string{
		"declaration", "definition", "readonly", "static",
		"deprecated", "abstract", "async", "modification", "documentation", "defaultLibrary"}
)