github.com/jhump/golang-x-tools@v0.0.0-20220218190644-4958d6d39439/internal/lsp/semantic.go

// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package lsp

import (
	"bytes"
	"context"
	"fmt"
	"go/ast"
	"go/token"
	"go/types"
	"log"
	"path/filepath"
	"sort"
	"strings"
	"time"

	"github.com/jhump/golang-x-tools/internal/event"
	"github.com/jhump/golang-x-tools/internal/lsp/protocol"
	"github.com/jhump/golang-x-tools/internal/lsp/source"
	"github.com/jhump/golang-x-tools/internal/lsp/template"
	"github.com/jhump/golang-x-tools/internal/typeparams"
	errors "golang.org/x/xerrors"
)

// The LSP says that errors for the semantic token requests should only be returned
// for exceptions (a word not otherwise defined). This code treats a too-large file
// as an exception. On parse errors, the code does what it can.

// reject full semantic token requests for large files
const maxFullFileSize int = 100000

// to control comprehensive logging of decisions (gopls semtok foo.go > /dev/null shows log output)
// semDebug should NEVER be true in checked-in code
const semDebug = false

func (s *Server) semanticTokensFull(ctx context.Context, p *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
	ret, err := s.computeSemanticTokens(ctx, p.TextDocument, nil)
	return ret, err
}

func (s *Server) semanticTokensFullDelta(ctx context.Context, p *protocol.SemanticTokensDeltaParams) (interface{}, error) {
	return nil, errors.Errorf("implement SemanticTokensFullDelta")
}

func (s *Server) semanticTokensRange(ctx context.Context, p *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) {
	ret, err := s.computeSemanticTokens(ctx, p.TextDocument, &p.Range)
	return ret, err
}

func (s *Server) semanticTokensRefresh(ctx context.Context) error {
	// in the code, but not in the protocol spec
	return errors.Errorf("implement SemanticTokensRefresh")
}
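
// These handlers serve the LSP 3.16 semantic-token requests: semanticTokensFull and
// semanticTokensRange implement "textDocument/semanticTokens/full" and
// "textDocument/semanticTokens/range"; the delta and refresh variants
// ("textDocument/semanticTokens/full/delta", "workspace/semanticTokens/refresh")
// are stubbed out above.
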
func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocumentIdentifier, rng *protocol.Range) (*protocol.SemanticTokens, error) {
	ans := protocol.SemanticTokens{
		Data: []uint32{},
	}
	snapshot, fh, ok, release, err := s.beginFileRequest(ctx, td.URI, source.UnknownKind)
	defer release()
	if !ok {
		return nil, err
	}
	vv := snapshot.View()
	if !vv.Options().SemanticTokens {
		// return an error, so if the option changes
		// the client won't remember the wrong answer
		return nil, errors.Errorf("semantictokens are disabled")
	}
	kind := snapshot.View().FileKind(fh)
	if kind == source.Tmpl {
		// this is a little cumbersome to avoid both exporting 'encoded' and its methods
		// and to avoid import cycles
		e := &encoded{
			ctx:      ctx,
			rng:      rng,
			tokTypes: s.session.Options().SemanticTypes,
			tokMods:  s.session.Options().SemanticMods,
		}
		add := func(line, start uint32, len uint32) {
			e.add(line, start, len, tokMacro, nil)
		}
		data := func() []uint32 {
			return e.Data()
		}
		return template.SemanticTokens(ctx, snapshot, fh.URI(), add, data)
	}
	if kind != source.Go {
		return nil, nil
	}
	pkg, err := snapshot.PackageForFile(ctx, fh.URI(), source.TypecheckFull, source.WidestPackage)
	if err != nil {
		return nil, err
	}
	pgf, err := pkg.File(fh.URI())
	if err != nil {
		return nil, err
	}
	// ignore pgf.ParseErr. Do what we can.
	if rng == nil && len(pgf.Src) > maxFullFileSize {
		err := fmt.Errorf("semantic tokens: file %s too large for full (%d>%d)",
			fh.URI().Filename(), len(pgf.Src), maxFullFileSize)
		return nil, err
	}
	e := &encoded{
		ctx:      ctx,
		pgf:      pgf,
		rng:      rng,
		ti:       pkg.GetTypesInfo(),
		pkg:      pkg,
		fset:     snapshot.FileSet(),
		tokTypes: s.session.Options().SemanticTypes,
		tokMods:  s.session.Options().SemanticMods,
	}
	if err := e.init(); err != nil {
		// e.init should never return an error, unless there's some
		// seemingly impossible race condition
		return nil, err
	}
	e.semantics()
	ans.Data = e.Data()
	// For delta requests, but we've never seen any.
	ans.ResultID = fmt.Sprintf("%v", time.Now())
	return &ans, nil
}

func (e *encoded) semantics() {
	f := e.pgf.File
	// may not be in range, but harmless
	e.token(f.Package, len("package"), tokKeyword, nil)
	e.token(f.Name.NamePos, len(f.Name.Name), tokNamespace, nil)
	inspect := func(n ast.Node) bool {
		return e.inspector(n)
	}
	for _, d := range f.Decls {
		// only look at the decls that overlap the range
		start, end := d.Pos(), d.End()
		if end <= e.start || start >= e.end {
			continue
		}
		ast.Inspect(d, inspect)
	}
	for _, cg := range f.Comments {
		for _, c := range cg.List {
			if !strings.Contains(c.Text, "\n") {
				e.token(c.Pos(), len(c.Text), tokComment, nil)
				continue
			}
			e.multiline(c.Pos(), c.End(), c.Text, tokComment)
		}
	}
}

type tokenType string

const (
	tokNamespace tokenType = "namespace"
	tokType      tokenType = "type"
	tokInterface tokenType = "interface"
	tokTypeParam tokenType = "typeParameter"
	tokParameter tokenType = "parameter"
	tokVariable  tokenType = "variable"
	tokMethod    tokenType = "method"
	tokFunction  tokenType = "function"
	tokKeyword   tokenType = "keyword"
	tokComment   tokenType = "comment"
	tokString    tokenType = "string"
	tokNumber    tokenType = "number"
	tokOperator  tokenType = "operator"

	tokMacro tokenType = "macro" // for templates
)
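
// As a rough illustration of the mapping, for a line such as
//
//	var x = fmt.Sprintf("%d", 3)
//
// the encoder emits: "var" as tokKeyword, "x" as tokVariable (with a
// "definition" modifier), "fmt" as tokNamespace, "Sprintf" as tokFunction,
// the format string as tokString, and "3" as tokNumber.
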
func (e *encoded) token(start token.Pos, leng int, typ tokenType, mods []string) {
	if !start.IsValid() {
		// This is not worth reporting
		return
	}
	if start >= e.end || start+token.Pos(leng) <= e.start {
		return
	}
	// want a line and column from start (in LSP coordinates)
	// [//line directives should be ignored]
	rng := source.NewMappedRange(e.fset, e.pgf.Mapper, start, start+token.Pos(leng))
	lspRange, err := rng.Range()
	if err != nil {
		// possibly a //line directive. TODO(pjw): fix this somehow
		// "column mapper is for file...instead of..."
		// "line is beyond end of file..."
		// see line 116 of internal/span/token.go which uses Position not PositionFor
		// (it is too verbose to print the error on every token; some other RPC will fail)
		// event.Error(e.ctx, "failed to convert to range", err)
		return
	}
	if lspRange.End.Line != lspRange.Start.Line {
		// this happens if users are typing at the end of the file, but report nothing
		return
	}
	// token is all on one line
	length := lspRange.End.Character - lspRange.Start.Character
	e.add(lspRange.Start.Line, lspRange.Start.Character, length, typ, mods)
}

func (e *encoded) add(line, start uint32, len uint32, tok tokenType, mod []string) {
	x := semItem{line, start, len, tok, mod}
	e.items = append(e.items, x)
}

// semItem represents a token found walking the parse tree
type semItem struct {
	line, start uint32
	len         uint32
	typeStr     tokenType
	mods        []string
}

type encoded struct {
	// the generated data
	items []semItem

	ctx               context.Context
	tokTypes, tokMods []string
	pgf               *source.ParsedGoFile
	rng               *protocol.Range
	ti                *types.Info
	types             *types.Package
	pkg               source.Package
	fset              *token.FileSet
	// allowed starting and ending token.Pos, set by init
	// used to avoid looking at declarations not in range
	start, end token.Pos
	// path from the root of the parse tree, used for debugging
	stack []ast.Node
}

// convert the stack to a string, for debugging
func (e *encoded) strStack() string {
	msg := []string{"["}
	for i := len(e.stack) - 1; i >= 0; i-- {
		s := e.stack[i]
		msg = append(msg, fmt.Sprintf("%T", s)[5:])
	}
	if len(e.stack) > 0 {
		loc := e.stack[len(e.stack)-1].Pos()
		if !source.InRange(e.pgf.Tok, loc) {
			msg = append(msg, fmt.Sprintf("invalid position %v for %s", loc, e.pgf.URI))
		} else if locInRange(e.pgf.Tok, loc) {
			add := e.pgf.Tok.PositionFor(loc, false)
			nm := filepath.Base(add.Filename)
			msg = append(msg, fmt.Sprintf("(%s:%d,col:%d)", nm, add.Line, add.Column))
		} else {
			msg = append(msg, fmt.Sprintf("(loc %d out of range)", loc))
		}
	}
	msg = append(msg, "]")
	return strings.Join(msg, " ")
}

// avoid panic in token.PositionFor() when typing at the end of the file
func locInRange(f *token.File, loc token.Pos) bool {
	return f.Base() <= int(loc) && int(loc) < f.Base()+f.Size()
}

// find the line in the source
func (e *encoded) srcLine(x ast.Node) string {
	file := e.pgf.Tok
	line := file.Line(x.Pos())
	start, err := source.Offset(file, file.LineStart(line))
	if err != nil {
		return ""
	}
	end := start
	for ; end < len(e.pgf.Src) && e.pgf.Src[end] != '\n'; end++ {
	}
	ans := e.pgf.Src[start:end]
	return string(ans)
}
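
// A note on the traversal below: ast.Inspect calls the callback with a nil
// node after it has visited a node's children, which is why inspector pops
// the stack on nil and pushes on every non-nil node. The stack therefore
// always holds the path from the declaration down to the node being visited,
// e.g. [GenDecl ValueSpec Ident] while visiting the name in "var x int".
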
func (e *encoded) inspector(n ast.Node) bool {
	pop := func() {
		e.stack = e.stack[:len(e.stack)-1]
	}
	if n == nil {
		pop()
		return true
	}
	e.stack = append(e.stack, n)
	switch x := n.(type) {
	case *ast.ArrayType:
	case *ast.AssignStmt:
		e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil)
	case *ast.BasicLit:
		if strings.Contains(x.Value, "\n") {
			// has to be a string
			e.multiline(x.Pos(), x.End(), x.Value, tokString)
			break
		}
		ln := len(x.Value)
		what := tokNumber
		if x.Kind == token.STRING {
			what = tokString
			if _, ok := e.stack[len(e.stack)-2].(*ast.Field); ok {
				// struct tags (this is probably pointless, as the
				// TextMate grammar will treat all the other comments the same)
				what = tokComment
			}
		}
		e.token(x.Pos(), ln, what, nil)
	case *ast.BinaryExpr:
		e.token(x.OpPos, len(x.Op.String()), tokOperator, nil)
	case *ast.BlockStmt:
	case *ast.BranchStmt:
		e.token(x.TokPos, len(x.Tok.String()), tokKeyword, nil)
		// There's no semantic encoding for labels
	case *ast.CallExpr:
		if x.Ellipsis != token.NoPos {
			e.token(x.Ellipsis, len("..."), tokOperator, nil)
		}
	case *ast.CaseClause:
		iam := "case"
		if x.List == nil {
			iam = "default"
		}
		e.token(x.Case, len(iam), tokKeyword, nil)
	case *ast.ChanType:
		// chan | chan <- | <- chan
		if x.Arrow == token.NoPos || x.Arrow != x.Begin {
			e.token(x.Begin, len("chan"), tokKeyword, nil)
			break
		}
		pos := e.findKeyword("chan", x.Begin+2, x.Value.Pos())
		e.token(pos, len("chan"), tokKeyword, nil)
	case *ast.CommClause:
		iam := len("case")
		if x.Comm == nil {
			iam = len("default")
		}
		e.token(x.Case, iam, tokKeyword, nil)
	case *ast.CompositeLit:
	case *ast.DeclStmt:
	case *ast.DeferStmt:
		e.token(x.Defer, len("defer"), tokKeyword, nil)
	case *ast.Ellipsis:
		e.token(x.Ellipsis, len("..."), tokOperator, nil)
	case *ast.EmptyStmt:
	case *ast.ExprStmt:
	case *ast.Field:
	case *ast.FieldList:
	case *ast.ForStmt:
		e.token(x.For, len("for"), tokKeyword, nil)
	case *ast.FuncDecl:
	case *ast.FuncLit:
	case *ast.FuncType:
		if x.Func != token.NoPos {
			e.token(x.Func, len("func"), tokKeyword, nil)
		}
	case *ast.GenDecl:
		e.token(x.TokPos, len(x.Tok.String()), tokKeyword, nil)
	case *ast.GoStmt:
		e.token(x.Go, len("go"), tokKeyword, nil)
	case *ast.Ident:
		e.ident(x)
	case *ast.IfStmt:
		e.token(x.If, len("if"), tokKeyword, nil)
		if x.Else != nil {
			// x.Body.End() or x.Body.End()+1, not that it matters
			pos := e.findKeyword("else", x.Body.End(), x.Else.Pos())
			e.token(pos, len("else"), tokKeyword, nil)
		}
	case *ast.ImportSpec:
		e.importSpec(x)
		pop()
		return false
	case *ast.IncDecStmt:
		e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil)
	case *ast.IndexExpr:
	case *typeparams.IndexListExpr: // accommodate generics
	case *ast.InterfaceType:
		e.token(x.Interface, len("interface"), tokKeyword, nil)
	case *ast.KeyValueExpr:
	case *ast.LabeledStmt:
	case *ast.MapType:
		e.token(x.Map, len("map"), tokKeyword, nil)
	case *ast.ParenExpr:
	case *ast.RangeStmt:
		e.token(x.For, len("for"), tokKeyword, nil)
		// x.TokPos == token.NoPos is legal (for range foo {})
		offset := x.TokPos
		if offset == token.NoPos {
			offset = x.For
		}
		pos := e.findKeyword("range", offset, x.X.Pos())
		e.token(pos, len("range"), tokKeyword, nil)
	case *ast.ReturnStmt:
		e.token(x.Return, len("return"), tokKeyword, nil)
	case *ast.SelectStmt:
		e.token(x.Select, len("select"), tokKeyword, nil)
	case *ast.SelectorExpr:
	case *ast.SendStmt:
		e.token(x.Arrow, len("<-"), tokOperator, nil)
	case *ast.SliceExpr:
	case *ast.StarExpr:
		e.token(x.Star, len("*"), tokOperator, nil)
	case *ast.StructType:
		e.token(x.Struct, len("struct"), tokKeyword, nil)
	case *ast.SwitchStmt:
		e.token(x.Switch, len("switch"), tokKeyword, nil)
	case *ast.TypeAssertExpr:
		if x.Type == nil {
			pos := e.findKeyword("type", x.Lparen, x.Rparen)
			e.token(pos, len("type"), tokKeyword, nil)
		}
	case *ast.TypeSpec:
	case *ast.TypeSwitchStmt:
		e.token(x.Switch, len("switch"), tokKeyword, nil)
	case *ast.UnaryExpr:
		e.token(x.OpPos, len(x.Op.String()), tokOperator, nil)
	case *ast.ValueSpec:
	// things only seen with parsing or type errors, so ignore them
	case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
		return true
	// not going to see these
	case *ast.File, *ast.Package:
		e.unexpected(fmt.Sprintf("implement %T %s", x, e.pgf.Tok.PositionFor(x.Pos(), false)))
	// other things we knowingly ignore
	case *ast.Comment, *ast.CommentGroup:
		pop()
		return false
	default:
		e.unexpected(fmt.Sprintf("failed to implement %T", x))
	}
	return true
}
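
// How ident classifies a name, in outline: if the identifier appears in
// e.ti.Defs it is a definition and definitionFor decides its kind from the
// enclosing syntax; if it appears in e.ti.Uses the kind comes from the
// types.Object (PkgName, Const, Func, TypeName, Var, ...); if it appears in
// neither map (typically because the package does not type-check), unkIdent
// guesses from the parse stack alone. For "var x int; x = 3", the first x is
// in Defs and the second in Uses.
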
func (e *encoded) ident(x *ast.Ident) {
	if e.ti == nil {
		what, mods := e.unkIdent(x)
		if what != "" {
			e.token(x.Pos(), len(x.String()), what, mods)
		}
		if semDebug {
			log.Printf(" nil %s/nil/nil %q %v %s", x.String(), what, mods, e.strStack())
		}
		return
	}
	def := e.ti.Defs[x]
	if def != nil {
		what, mods := e.definitionFor(x, def)
		if what != "" {
			e.token(x.Pos(), len(x.String()), what, mods)
		}
		if semDebug {
			log.Printf(" for %s/%T/%T got %s %v (%s)", x.String(), def, def.Type(), what, mods, e.strStack())
		}
		return
	}
	use := e.ti.Uses[x]
	tok := func(pos token.Pos, lng int, tok tokenType, mods []string) {
		e.token(pos, lng, tok, mods)
		q := "nil"
		if use != nil {
			q = fmt.Sprintf("%T", use.Type())
		}
		if semDebug {
			log.Printf(" use %s/%T/%s got %s %v (%s)", x.String(), use, q, tok, mods, e.strStack())
		}
	}

	switch y := use.(type) {
	case nil:
		what, mods := e.unkIdent(x)
		if what != "" {
			tok(x.Pos(), len(x.String()), what, mods)
		} else if semDebug {
			// tok() wasn't called, so didn't log
			log.Printf(" nil %s/%T/nil %q %v (%s)", x.String(), use, what, mods, e.strStack())
		}
		return
	case *types.Builtin:
		tok(x.NamePos, len(x.Name), tokFunction, []string{"defaultLibrary"})
	case *types.Const:
		mods := []string{"readonly"}
		tt := y.Type()
		if _, ok := tt.(*types.Basic); ok {
			tok(x.Pos(), len(x.String()), tokVariable, mods)
			break
		}
		if ttx, ok := tt.(*types.Named); ok {
			if x.String() == "iota" {
				e.unexpected(fmt.Sprintf("iota:%T", ttx))
			}
			if _, ok := ttx.Underlying().(*types.Basic); ok {
				tok(x.Pos(), len(x.String()), tokVariable, mods)
				break
			}
			e.unexpected(fmt.Sprintf("%q/%T", x.String(), tt))
		}
		// can this happen? Don't think so
		e.unexpected(fmt.Sprintf("%s %T %#v", x.String(), tt, tt))
	case *types.Func:
		tok(x.Pos(), len(x.Name), tokFunction, nil)
	case *types.Label:
		// nothing to map it to
	case *types.Nil:
		// nil is a predeclared identifier
		tok(x.Pos(), len("nil"), tokVariable, []string{"readonly", "defaultLibrary"})
	case *types.PkgName:
		tok(x.Pos(), len(x.Name), tokNamespace, nil)
	case *types.TypeName: // could be a tokTypeParam
		var mods []string
		if _, ok := y.Type().(*types.Basic); ok {
			mods = []string{"defaultLibrary"}
		} else if _, ok := y.Type().(*typeparams.TypeParam); ok {
			tok(x.Pos(), len(x.String()), tokTypeParam, mods)
			break
		}
		tok(x.Pos(), len(x.String()), tokType, mods)
	case *types.Var:
		if isSignature(y) {
			tok(x.Pos(), len(x.Name), tokFunction, nil)
		} else if _, ok := y.Type().(*typeparams.TypeParam); ok {
			tok(x.Pos(), len(x.Name), tokTypeParam, nil)
		} else {
			tok(x.Pos(), len(x.Name), tokVariable, nil)
		}
	default:
		// can't happen
		if use == nil {
			msg := fmt.Sprintf("%#v/%#v %#v %#v", x, x.Obj, e.ti.Defs[x], e.ti.Uses[x])
			e.unexpected(msg)
		}
		if use.Type() != nil {
			e.unexpected(fmt.Sprintf("%s %T/%T,%#v", x.String(), use, use.Type(), use))
		} else {
			e.unexpected(fmt.Sprintf("%s %T", x.String(), use))
		}
	}
}

func isSignature(use types.Object) bool {
	if true {
		return false // PJW: fix after generics seem ok
	}
	if _, ok := use.(*types.Var); !ok {
		return false
	}
	v := use.Type()
	if v == nil {
		return false
	}
	if _, ok := v.(*types.Signature); ok {
		return true
	}
	return false
}
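
// unkIdent, below, has only the syntax to go on. For example, while visiting
// f in "x := f(y)" the stack ends [... AssignStmt CallExpr Ident], and since
// the identifier is the CallExpr's Fun it is reported as tokFunction; y, as
// an argument, falls through to tokVariable.
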
// both e.ti.Defs and e.ti.Uses are nil. use the parse stack.
// a lot of these only happen when the package doesn't compile
// but in that case it is all best-effort from the parse tree
func (e *encoded) unkIdent(x *ast.Ident) (tokenType, []string) {
	def := []string{"definition"}
	n := len(e.stack) - 2 // parent of Ident
	if n < 0 {
		e.unexpected("no stack?")
		return "", nil
	}
	switch nd := e.stack[n].(type) {
	case *ast.BinaryExpr, *ast.UnaryExpr, *ast.ParenExpr, *ast.StarExpr,
		*ast.IncDecStmt, *ast.SliceExpr, *ast.ExprStmt, *ast.IndexExpr,
		*ast.ReturnStmt, *ast.ChanType, *ast.SendStmt,
		*ast.ForStmt, // possibly incomplete
		*ast.IfStmt,  /* condition */
		*ast.KeyValueExpr: // either key or value
		return tokVariable, nil
	case *typeparams.IndexListExpr: // generic?
		return tokVariable, nil
	case *ast.Ellipsis:
		return tokType, nil
	case *ast.CaseClause:
		if n-2 >= 0 {
			if _, ok := e.stack[n-2].(*ast.TypeSwitchStmt); ok {
				return tokType, nil
			}
		}
		return tokVariable, nil
	case *ast.ArrayType:
		if x == nd.Len {
			// or maybe a Type Param, but we can't tell just from the parse tree
			return tokVariable, nil
		} else {
			return tokType, nil
		}
	case *ast.MapType:
		return tokType, nil
	case *ast.CallExpr:
		if x == nd.Fun {
			return tokFunction, nil
		}
		return tokVariable, nil
	case *ast.SwitchStmt:
		return tokVariable, nil
	case *ast.TypeAssertExpr:
		if x == nd.X {
			return tokVariable, nil
		} else if x == nd.Type {
			return tokType, nil
		}
	case *ast.ValueSpec:
		for _, p := range nd.Names {
			if p == x {
				return tokVariable, def
			}
		}
		for _, p := range nd.Values {
			if p == x {
				return tokVariable, nil
			}
		}
		return tokType, nil
	case *ast.SelectorExpr: // e.ti.Selections[nd] is nil, so no help
		if n-1 >= 0 {
			if ce, ok := e.stack[n-1].(*ast.CallExpr); ok {
				// ... CallExpr SelectorExpr Ident (_.x())
				if ce.Fun == nd && nd.Sel == x {
					return tokFunction, nil
				}
			}
		}
		return tokVariable, nil
	case *ast.AssignStmt:
		for _, p := range nd.Lhs {
			// x := ..., or x = ...
			if p == x {
				if nd.Tok != token.DEFINE {
					def = nil
				}
				return tokVariable, def
			}
		}
		// RHS, = x
		return tokVariable, nil
	case *ast.TypeSpec: // it's a type if it is either the Name or the Type
		if x == nd.Type {
			def = nil
		}
		return tokType, def
	case *ast.Field:
		// ident could be type in a field, or a method in an interface type, or a variable
		if x == nd.Type {
			return tokType, nil
		}
		if n-2 >= 0 {
			_, okit := e.stack[n-2].(*ast.InterfaceType)
			_, okfl := e.stack[n-1].(*ast.FieldList)
			if okit && okfl {
				return tokMethod, def
			}
		}
		return tokVariable, nil
	case *ast.LabeledStmt, *ast.BranchStmt:
		// nothing to report
	case *ast.CompositeLit:
		if nd.Type == x {
			return tokType, nil
		}
		return tokVariable, nil
	case *ast.RangeStmt:
		if nd.Tok != token.DEFINE {
			def = nil
		}
		return tokVariable, def
	case *ast.FuncDecl:
		return tokFunction, def
	default:
		msg := fmt.Sprintf("%T unexpected: %s %s%q", nd, x.Name, e.strStack(), e.srcLine(x))
		e.unexpected(msg)
	}
	return "", nil
}

func isDeprecated(n *ast.CommentGroup) bool {
	if n == nil {
		return false
	}
	for _, c := range n.List {
		if strings.HasPrefix(c.Text, "// Deprecated") {
			return true
		}
	}
	return false
}
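
// definitionFor, below, adds LSP modifiers as well as a token type. For
// example, in
//
//	// Deprecated: use NewThing instead.
//	const maxRetries = 3
//
// the identifier maxRetries is reported as tokVariable with the modifiers
// "definition", "deprecated", and "readonly" (the GenDecl carries both the
// doc comment and the CONST token).
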
func (e *encoded) definitionFor(x *ast.Ident, def types.Object) (tokenType, []string) {
	// PJW: def == types.Label? probably a nothing
	// PJW: look into replacing these syntactic tests with types more generally
	mods := []string{"definition"}
	for i := len(e.stack) - 1; i >= 0; i-- {
		s := e.stack[i]
		switch y := s.(type) {
		case *ast.AssignStmt, *ast.RangeStmt:
			if x.Name == "_" {
				return "", nil // not really a variable
			}
			return tokVariable, mods
		case *ast.GenDecl:
			if isDeprecated(y.Doc) {
				mods = append(mods, "deprecated")
			}
			if y.Tok == token.CONST {
				mods = append(mods, "readonly")
			}
			return tokVariable, mods
		case *ast.FuncDecl:
			// If x is immediately under a FuncDecl, it is a function or method
			if i == len(e.stack)-2 {
				if isDeprecated(y.Doc) {
					mods = append(mods, "deprecated")
				}
				if y.Recv != nil {
					return tokMethod, mods
				}
				return tokFunction, mods
			}
			// if x < ... < FieldList < FuncDecl, this is the receiver, a variable
			if _, ok := e.stack[i+1].(*ast.FieldList); ok {
				return tokVariable, nil
			}
			// if x < ... < FieldList < FuncType < FuncDecl, this is a param
			return tokParameter, mods
		case *ast.FuncType:
			return tokParameter, mods
		case *ast.InterfaceType:
			return tokMethod, mods
		case *ast.TypeSpec:
			// GenDecl/TypeSpec/FuncType/FieldList/Field/Ident
			// (type A func(b uint64)) (err error)
			// b and err should not be tokType, but tokVariable
			// and in GenDecl/TypeSpec/StructType/FieldList/Field/Ident
			// (type A struct{b uint64}
			// but on type B struct{C}), C is a type, but is not being defined.
			// GenDecl/TypeSpec/FieldList/Field/Ident is a typeParam
			if _, ok := e.stack[i+1].(*ast.FieldList); ok {
				return tokTypeParam, mods
			}
			fldm := e.stack[len(e.stack)-2]
			if fld, ok := fldm.(*ast.Field); ok {
				// if len(fld.Names) == 0 this is a tokType, being used
				if len(fld.Names) == 0 {
					return tokType, nil
				}
				return tokVariable, mods
			}
			return tokType, mods
		}
	}
	// can't happen
	msg := fmt.Sprintf("failed to find the decl for %s", e.pgf.Tok.PositionFor(x.Pos(), false))
	e.unexpected(msg)
	return "", []string{""}
}
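
// multiline, below, breaks a token that spans lines into one token per line,
// because the LSP encoding cannot represent multi-line tokens. A raw string
// or /* */ comment covering lines 10-12 becomes three tokens: the tail of
// line 10 from the token's start column, all of line 11, and the head of
// line 12 up to the token's end column.
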
func (e *encoded) multiline(start, end token.Pos, val string, tok tokenType) {
	f := e.fset.File(start)
	// the hard part is finding the lengths of lines. include the \n
	leng := func(line int) int {
		n := f.LineStart(line)
		if line >= f.LineCount() {
			return f.Size() - int(n)
		}
		return int(f.LineStart(line+1) - n)
	}
	spos := e.fset.PositionFor(start, false)
	epos := e.fset.PositionFor(end, false)
	sline := spos.Line
	eline := epos.Line
	// first line is from spos.Column to end
	e.token(start, leng(sline)-spos.Column, tok, nil) // leng(sline)-1 - (spos.Column-1)
	for i := sline + 1; i < eline; i++ {
		// intermediate lines are from 1 to end
		e.token(f.LineStart(i), leng(i)-1, tok, nil) // avoid the newline
	}
	// last line is from 1 to epos.Column
	e.token(f.LineStart(eline), epos.Column-1, tok, nil) // columns are 1-based
}

// findKeyword finds a keyword rather than guessing its location
func (e *encoded) findKeyword(keyword string, start, end token.Pos) token.Pos {
	offset := int(start) - e.pgf.Tok.Base()
	last := int(end) - e.pgf.Tok.Base()
	buf := e.pgf.Src
	idx := bytes.Index(buf[offset:last], []byte(keyword))
	if idx != -1 {
		return start + token.Pos(idx)
	}
	// (in unparsable programs: type _ <-<-chan int)
	e.unexpected(fmt.Sprintf("not found:%s %v", keyword, e.fset.PositionFor(start, false)))
	return token.NoPos
}

func (e *encoded) init() error {
	e.start = token.Pos(e.pgf.Tok.Base())
	e.end = e.start + token.Pos(e.pgf.Tok.Size())
	if e.rng == nil {
		return nil
	}
	span, err := e.pgf.Mapper.RangeSpan(*e.rng)
	if err != nil {
		return errors.Errorf("range span (%w) error for %s", err, e.pgf.File.Name)
	}
	e.end = e.start + token.Pos(span.End().Offset())
	e.start += token.Pos(span.Start().Offset())
	return nil
}
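
// Data, below, produces the LSP "Integer Encoding for Tokens": five uint32s
// per token, with the line and (same-line) start column expressed as deltas
// from the previous token. For illustration, two tokens on line 4 at columns
// 2 (length 7, type index 3) and 12 (length 3, type index 0, modifier bit 1)
// followed by one on line 6 at column 0 (length 5, type index 8) encode as
//
//	4, 2, 7, 3, 0,   0, 10, 3, 0, 2,   2, 0, 5, 8, 0
//
// assuming the first token is also the first in the file.
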
func (e *encoded) Data() []uint32 {
	// binary operators, at least, will be out of order
	sort.Slice(e.items, func(i, j int) bool {
		if e.items[i].line != e.items[j].line {
			return e.items[i].line < e.items[j].line
		}
		return e.items[i].start < e.items[j].start
	})
	typeMap, modMap := e.maps()
	// each semantic token needs five values
	// (see Integer Encoding for Tokens in the LSP spec)
	x := make([]uint32, 5*len(e.items))
	var j int
	for i := 0; i < len(e.items); i++ {
		typ, ok := typeMap[e.items[i].typeStr]
		if !ok {
			continue // client doesn't want typeStr
		}
		if i == 0 {
			x[0] = e.items[0].line
		} else {
			x[j] = e.items[i].line - e.items[i-1].line
		}
		x[j+1] = e.items[i].start
		if i > 0 && e.items[i].line == e.items[i-1].line {
			x[j+1] = e.items[i].start - e.items[i-1].start
		}
		x[j+2] = e.items[i].len
		x[j+3] = uint32(typ)
		mask := 0
		for _, s := range e.items[i].mods {
			// modMap[s] is 0 if the client doesn't want this modifier
			mask |= modMap[s]
		}
		x[j+4] = uint32(mask)
		j += 5
	}
	return x[:j]
}
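
// importSpec, below, colors either the local name of an import or the last
// component of its path. For example, in
//
//	import (
//		foo "example.com/pkg/bar"
//		"example.com/pkg/baz"
//	)
//
// foo gets a namespace token, and in the second line only the "baz" inside
// the quotes does; dot and blank imports get nothing.
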
func (e *encoded) importSpec(d *ast.ImportSpec) {
	// a local package name or the last component of the Path
	if d.Name != nil {
		nm := d.Name.String()
		if nm != "_" && nm != "." {
			e.token(d.Name.Pos(), len(nm), tokNamespace, nil)
		}
		return // don't mark anything for . or _
	}
	val := d.Path.Value
	if len(val) < 2 || val[0] != '"' || val[len(val)-1] != '"' {
		// avoid panics on imports without a properly quoted string
		return
	}
	nm := val[1 : len(val)-1] // remove surrounding "s
	// Import strings are implementation defined. Try to match with parse information.
	x, err := e.pkg.GetImport(nm)
	if err != nil {
		// unexpected, but the only consequence is that the import is not colored
		return
	}
	// expect that nm is x.PkgPath and that x.Name() is a component of it
	if x.PkgPath() != nm {
		// don't know how or what to color (if this can happen at all)
		return
	}
	// this is not a precise test: imagine "github.com/nasty/v/v2"
	j := strings.LastIndex(nm, x.Name())
	if j == -1 {
		// name doesn't show up, for whatever reason, so nothing to report
		return
	}
	start := d.Path.Pos() + 1 + token.Pos(j) // skip the initial quote
	e.token(start, len(x.Name()), tokNamespace, nil)
}

// log unexpected state
func (e *encoded) unexpected(msg string) {
	if semDebug {
		panic(msg)
	}
	event.Error(e.ctx, e.strStack(), errors.New(msg))
}

// SemType returns a string equivalent of the type, for gopls semtok
func SemType(n int) string {
	tokTypes := SemanticTypes()
	tokMods := SemanticModifiers()
	if n >= 0 && n < len(tokTypes) {
		return tokTypes[n]
	}
	return fmt.Sprintf("?%d[%d,%d]?", n, len(tokTypes), len(tokMods))
}

// SemMods returns the []string equivalent of the mods, for gopls semtok.
func SemMods(n int) []string {
	tokMods := SemanticModifiers()
	mods := []string{}
	for i := 0; i < len(tokMods); i++ {
		if (n & (1 << uint(i))) != 0 {
			mods = append(mods, tokMods[i])
		}
	}
	return mods
}

func (e *encoded) maps() (map[tokenType]int, map[string]int) {
	tmap := make(map[tokenType]int)
	mmap := make(map[string]int)
	for i, t := range e.tokTypes {
		tmap[tokenType(t)] = i
	}
	for i, m := range e.tokMods {
		mmap[m] = 1 << uint(i) // go 1.12 compatibility
	}
	return tmap, mmap
}

// SemanticTypes to use in case there is no client, as in the command line, or tests
func SemanticTypes() []string {
	return semanticTypes[:]
}

// SemanticModifiers to use in case there is no client.
func SemanticModifiers() []string {
	return semanticModifiers[:]
}

var (
	semanticTypes = [...]string{
		"namespace", "type", "class", "enum", "interface",
		"struct", "typeParameter", "parameter", "variable", "property", "enumMember",
		"event", "function", "method", "macro", "keyword", "modifier", "comment",
		"string", "number", "regexp", "operator",
	}
	semanticModifiers = [...]string{
		"declaration", "definition", "readonly", "static",
		"deprecated", "abstract", "async", "modification", "documentation", "defaultLibrary",
	}
)
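
// For illustration of the modifier encoding: maps() assigns each modifier the
// bit 1<<i for its index i above, so with these defaults a mask of 5
// (bits 0 and 2) decodes via SemMods to []string{"declaration", "readonly"}.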