github.com/powerman/golang-tools@v0.1.11-0.20220410185822-5ad214d8d803/internal/lsp/semantic.go

// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package lsp

import (
	"bytes"
	"context"
	"fmt"
	"go/ast"
	"go/token"
	"go/types"
	"log"
	"path/filepath"
	"sort"
	"strings"
	"time"

	"github.com/powerman/golang-tools/internal/event"
	"github.com/powerman/golang-tools/internal/lsp/protocol"
	"github.com/powerman/golang-tools/internal/lsp/source"
	"github.com/powerman/golang-tools/internal/lsp/template"
	"github.com/powerman/golang-tools/internal/typeparams"
	errors "golang.org/x/xerrors"
)

// The LSP says that errors for the semantic token requests should only be returned
// for exceptions (a word not otherwise defined). This code treats a too-large file
// as an exception. On parse errors, the code does what it can.

// reject full semantic token requests for large files
const maxFullFileSize int = 100000

// semDebug controls comprehensive logging of decisions
// (gopls semtok foo.go > /dev/null shows the log output).
// It should NEVER be true in checked-in code.
const semDebug = false

func (s *Server) semanticTokensFull(ctx context.Context, p *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
	ret, err := s.computeSemanticTokens(ctx, p.TextDocument, nil)
	return ret, err
}

func (s *Server) semanticTokensFullDelta(ctx context.Context, p *protocol.SemanticTokensDeltaParams) (interface{}, error) {
	return nil, errors.Errorf("implement SemanticTokensFullDelta")
}

func (s *Server) semanticTokensRange(ctx context.Context, p *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) {
	ret, err := s.computeSemanticTokens(ctx, p.TextDocument, &p.Range)
	return ret, err
}

func (s *Server) semanticTokensRefresh(ctx context.Context) error {
	// in the code, but not in the protocol spec
	return errors.Errorf("implement SemanticTokensRefresh")
}

func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocumentIdentifier, rng *protocol.Range) (*protocol.SemanticTokens, error) {
	ans := protocol.SemanticTokens{
		Data: []uint32{},
	}
	snapshot, fh, ok, release, err := s.beginFileRequest(ctx, td.URI, source.UnknownKind)
	defer release()
	if !ok {
		return nil, err
	}
	vv := snapshot.View()
	if !vv.Options().SemanticTokens {
		// return an error, so if the option changes
		// the client won't remember the wrong answer
		return nil, errors.Errorf("semantic tokens are disabled")
	}
	kind := snapshot.View().FileKind(fh)
	if kind == source.Tmpl {
		// this is a little cumbersome to avoid both exporting 'encoded' and its methods
		// and to avoid import cycles
		e := &encoded{
			ctx:      ctx,
			rng:      rng,
			tokTypes: s.session.Options().SemanticTypes,
			tokMods:  s.session.Options().SemanticMods,
		}
		add := func(line, start uint32, len uint32) {
			e.add(line, start, len, tokMacro, nil)
		}
		data := func() []uint32 {
			return e.Data()
		}
		return template.SemanticTokens(ctx, snapshot, fh.URI(), add, data)
	}
	if kind != source.Go {
		return nil, nil
	}
	pkg, err := snapshot.PackageForFile(ctx, fh.URI(), source.TypecheckFull, source.WidestPackage)
	if err != nil {
		return nil, err
	}
	pgf, err := pkg.File(fh.URI())
	if err != nil {
		return nil, err
	}
	// ignore pgf.ParseErr. Do what we can.
	if rng == nil && len(pgf.Src) > maxFullFileSize {
		err := fmt.Errorf("semantic tokens: file %s too large for full (%d>%d)",
			fh.URI().Filename(), len(pgf.Src), maxFullFileSize)
		return nil, err
	}
	e := &encoded{
		ctx:      ctx,
		pgf:      pgf,
		rng:      rng,
		ti:       pkg.GetTypesInfo(),
		pkg:      pkg,
		fset:     snapshot.FileSet(),
		tokTypes: s.session.Options().SemanticTypes,
		tokMods:  s.session.Options().SemanticMods,
	}
	if err := e.init(); err != nil {
		// e.init should never return an error, unless there's some
		// seemingly impossible race condition
		return nil, err
	}
	e.semantics()
	ans.Data = e.Data()
	// For delta requests, but we've never seen any.
	ans.ResultID = fmt.Sprintf("%v", time.Now())
	return &ans, nil
}

func (e *encoded) semantics() {
	f := e.pgf.File
	// may not be in range, but harmless
	e.token(f.Package, len("package"), tokKeyword, nil)
	e.token(f.Name.NamePos, len(f.Name.Name), tokNamespace, nil)
	inspect := func(n ast.Node) bool {
		return e.inspector(n)
	}
	for _, d := range f.Decls {
		// only look at the decls that overlap the range
		start, end := d.Pos(), d.End()
		if end <= e.start || start >= e.end {
			continue
		}
		ast.Inspect(d, inspect)
	}
	for _, cg := range f.Comments {
		for _, c := range cg.List {
			if !strings.Contains(c.Text, "\n") {
				e.token(c.Pos(), len(c.Text), tokComment, nil)
				continue
			}
			e.multiline(c.Pos(), c.End(), c.Text, tokComment)
		}
	}
}

type tokenType string

const (
	tokNamespace tokenType = "namespace"
	tokType      tokenType = "type"
	tokInterface tokenType = "interface"
	tokTypeParam tokenType = "typeParameter"
	tokParameter tokenType = "parameter"
	tokVariable  tokenType = "variable"
	tokMethod    tokenType = "method"
	tokFunction  tokenType = "function"
	tokKeyword   tokenType = "keyword"
	tokComment   tokenType = "comment"
	tokString    tokenType = "string"
	tokNumber    tokenType = "number"
	tokOperator  tokenType = "operator"

	tokMacro tokenType = "macro" // for templates
)
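
// Note (added for exposition): the names above are drawn from the standard
// LSP semantic token types. maps() translates them to indices in the legend
// the client negotiated (e.tokTypes, e.tokMods); Data() silently drops any
// token type the client did not ask for, and unrequested modifiers simply
// contribute no bits to the modifier mask.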

func (e *encoded) token(start token.Pos, leng int, typ tokenType, mods []string) {
	if !start.IsValid() {
		// This is not worth reporting
		return
	}
	if start >= e.end || start+token.Pos(leng) <= e.start {
		return
	}
	// want a line and column from start (in LSP coordinates)
	// [//line directives should be ignored]
	rng := source.NewMappedRange(e.fset, e.pgf.Mapper, start, start+token.Pos(leng))
	lspRange, err := rng.Range()
	if err != nil {
		// possibly a //line directive. TODO(pjw): fix this somehow
		// "column mapper is for file...instead of..."
		// "line is beyond end of file..."
		// see line 116 of internal/span/token.go which uses Position not PositionFor
		// (it is too verbose to print the error on every token;
		// some other RPC will fail)
		// event.Error(e.ctx, "failed to convert to range", err)
		return
	}
	if lspRange.End.Line != lspRange.Start.Line {
		// this happens if users are typing at the end of the file, but report nothing
		return
	}
	// token is all on one line
	length := lspRange.End.Character - lspRange.Start.Character
	e.add(lspRange.Start.Line, lspRange.Start.Character, length, typ, mods)
}

func (e *encoded) add(line, start uint32, len uint32, tok tokenType, mod []string) {
	x := semItem{line, start, len, tok, mod}
	e.items = append(e.items, x)
}

// semItem represents a token found walking the parse tree
type semItem struct {
	line, start uint32
	len         uint32
	typeStr     tokenType
	mods        []string
}

type encoded struct {
	// the generated data
	items []semItem

	ctx               context.Context
	tokTypes, tokMods []string
	pgf               *source.ParsedGoFile
	rng               *protocol.Range
	ti                *types.Info
	pkg               source.Package
	fset              *token.FileSet
	// allowed starting and ending token.Pos, set by init;
	// used to avoid looking at declarations not in range
	start, end token.Pos
	// path from the root of the parse tree, used for debugging
	stack []ast.Node
}

// convert the stack to a string, for debugging
func (e *encoded) strStack() string {
	msg := []string{"["}
	for i := len(e.stack) - 1; i >= 0; i-- {
		s := e.stack[i]
		msg = append(msg, fmt.Sprintf("%T", s)[5:])
	}
	if len(e.stack) > 0 {
		loc := e.stack[len(e.stack)-1].Pos()
		if !source.InRange(e.pgf.Tok, loc) {
			msg = append(msg, fmt.Sprintf("invalid position %v for %s", loc, e.pgf.URI))
		} else if locInRange(e.pgf.Tok, loc) {
			add := e.pgf.Tok.PositionFor(loc, false)
			nm := filepath.Base(add.Filename)
			msg = append(msg, fmt.Sprintf("(%s:%d,col:%d)", nm, add.Line, add.Column))
		} else {
			msg = append(msg, fmt.Sprintf("(loc %d out of range)", loc))
		}
	}
	msg = append(msg, "]")
	return strings.Join(msg, " ")
}

// avoid panic in token.PositionFor() when typing at the end of the file
func locInRange(f *token.File, loc token.Pos) bool {
	return f.Base() <= int(loc) && int(loc) < f.Base()+f.Size()
}

// find the line in the source
func (e *encoded) srcLine(x ast.Node) string {
	file := e.pgf.Tok
	line := file.Line(x.Pos())
	start, err := source.Offset(file, file.LineStart(line))
	if err != nil {
		return ""
	}
	end := start
	for ; end < len(e.pgf.Src) && e.pgf.Src[end] != '\n'; end++ {
	}
	ans := e.pgf.Src[start:end]
	return string(ans)
}
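
// inspector is the callback for ast.Inspect. It maintains e.stack, the path
// from the root of the parse tree to the current node, emits keyword and
// operator tokens for the syntactic forms it recognizes, and hands
// identifiers to ident. Returning false prunes the walk below a node
// (imports and comments are handled elsewhere).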
func (e *encoded) inspector(n ast.Node) bool {
	pop := func() {
		e.stack = e.stack[:len(e.stack)-1]
	}
	if n == nil {
		pop()
		return true
	}
	e.stack = append(e.stack, n)
	switch x := n.(type) {
	case *ast.ArrayType:
	case *ast.AssignStmt:
		e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil)
	case *ast.BasicLit:
		if strings.Contains(x.Value, "\n") {
			// has to be a string
			e.multiline(x.Pos(), x.End(), x.Value, tokString)
			break
		}
		ln := len(x.Value)
		what := tokNumber
		if x.Kind == token.STRING {
			what = tokString
			if _, ok := e.stack[len(e.stack)-2].(*ast.Field); ok {
				// struct tags (this is probably pointless, as the
				// TextMate grammar will treat all the other comments the same)
				what = tokComment
			}
		}
		e.token(x.Pos(), ln, what, nil)
	case *ast.BinaryExpr:
		e.token(x.OpPos, len(x.Op.String()), tokOperator, nil)
	case *ast.BlockStmt:
	case *ast.BranchStmt:
		e.token(x.TokPos, len(x.Tok.String()), tokKeyword, nil)
		// There's no semantic encoding for labels
	case *ast.CallExpr:
		if x.Ellipsis != token.NoPos {
			e.token(x.Ellipsis, len("..."), tokOperator, nil)
		}
	case *ast.CaseClause:
		iam := "case"
		if x.List == nil {
			iam = "default"
		}
		e.token(x.Case, len(iam), tokKeyword, nil)
	case *ast.ChanType:
		// chan | chan <- | <- chan
		switch {
		case x.Arrow == token.NoPos:
			e.token(x.Begin, len("chan"), tokKeyword, nil)
		case x.Arrow == x.Begin:
			e.token(x.Arrow, 2, tokOperator, nil)
			pos := e.findKeyword("chan", x.Begin+2, x.Value.Pos())
			e.token(pos, len("chan"), tokKeyword, nil)
		case x.Arrow != x.Begin:
			e.token(x.Begin, len("chan"), tokKeyword, nil)
			e.token(x.Arrow, 2, tokOperator, nil)
		}
	case *ast.CommClause:
		iam := len("case")
		if x.Comm == nil {
			iam = len("default")
		}
		e.token(x.Case, iam, tokKeyword, nil)
	case *ast.CompositeLit:
	case *ast.DeclStmt:
	case *ast.DeferStmt:
		e.token(x.Defer, len("defer"), tokKeyword, nil)
	case *ast.Ellipsis:
		e.token(x.Ellipsis, len("..."), tokOperator, nil)
	case *ast.EmptyStmt:
	case *ast.ExprStmt:
	case *ast.Field:
	case *ast.FieldList:
	case *ast.ForStmt:
		e.token(x.For, len("for"), tokKeyword, nil)
	case *ast.FuncDecl:
	case *ast.FuncLit:
	case *ast.FuncType:
		if x.Func != token.NoPos {
			e.token(x.Func, len("func"), tokKeyword, nil)
		}
	case *ast.GenDecl:
		e.token(x.TokPos, len(x.Tok.String()), tokKeyword, nil)
	case *ast.GoStmt:
		e.token(x.Go, len("go"), tokKeyword, nil)
	case *ast.Ident:
		e.ident(x)
	case *ast.IfStmt:
		e.token(x.If, len("if"), tokKeyword, nil)
		if x.Else != nil {
			// x.Body.End() or x.Body.End()+1, not that it matters
			pos := e.findKeyword("else", x.Body.End(), x.Else.Pos())
			e.token(pos, len("else"), tokKeyword, nil)
		}
	case *ast.ImportSpec:
		e.importSpec(x)
		pop()
		return false
	case *ast.IncDecStmt:
		e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil)
	case *ast.IndexExpr:
	case *typeparams.IndexListExpr: // accommodate generics
	case *ast.InterfaceType:
		e.token(x.Interface, len("interface"), tokKeyword, nil)
	case *ast.KeyValueExpr:
	case *ast.LabeledStmt:
	case *ast.MapType:
		e.token(x.Map, len("map"), tokKeyword, nil)
	case *ast.ParenExpr:
	case *ast.RangeStmt:
		e.token(x.For, len("for"), tokKeyword, nil)
		// x.TokPos == token.NoPos is legal (for range foo {})
		offset := x.TokPos
		if offset == token.NoPos {
			offset = x.For
		}
		pos := e.findKeyword("range", offset, x.X.Pos())
		e.token(pos, len("range"), tokKeyword, nil)
	case *ast.ReturnStmt:
		e.token(x.Return, len("return"), tokKeyword, nil)
	case *ast.SelectStmt:
		e.token(x.Select, len("select"), tokKeyword, nil)
	case *ast.SelectorExpr:
	case *ast.SendStmt:
		e.token(x.Arrow, len("<-"), tokOperator, nil)
	case *ast.SliceExpr:
	case *ast.StarExpr:
		e.token(x.Star, len("*"), tokOperator, nil)
	case *ast.StructType:
		e.token(x.Struct, len("struct"), tokKeyword, nil)
	case *ast.SwitchStmt:
		e.token(x.Switch, len("switch"), tokKeyword, nil)
	case *ast.TypeAssertExpr:
		if x.Type == nil {
			pos := e.findKeyword("type", x.Lparen, x.Rparen)
			e.token(pos, len("type"), tokKeyword, nil)
		}
	case *ast.TypeSpec:
	case *ast.TypeSwitchStmt:
		e.token(x.Switch, len("switch"), tokKeyword, nil)
	case *ast.UnaryExpr:
		e.token(x.OpPos, len(x.Op.String()), tokOperator, nil)
	case *ast.ValueSpec:
	// things only seen with parsing or type errors, so ignore them
	case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
		return true
	// not going to see these
	case *ast.File, *ast.Package:
		e.unexpected(fmt.Sprintf("implement %T %s", x, e.pgf.Tok.PositionFor(x.Pos(), false)))
	// other things we knowingly ignore
	case *ast.Comment, *ast.CommentGroup:
		pop()
		return false
	default:
		e.unexpected(fmt.Sprintf("failed to implement %T", x))
	}
	return true
}
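
// ident classifies a single identifier. When type information is available
// it consults e.ti.Defs (a definition, classified by definitionFor) and then
// e.ti.Uses (a use, classified by the kind of types.Object); if neither has
// an entry, unkIdent guesses from the surrounding parse tree.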
func (e *encoded) ident(x *ast.Ident) {
	if e.ti == nil {
		what, mods := e.unkIdent(x)
		if what != "" {
			e.token(x.Pos(), len(x.String()), what, mods)
		}
		if semDebug {
			log.Printf(" nil %s/nil/nil %q %v %s", x.String(), what, mods, e.strStack())
		}
		return
	}
	def := e.ti.Defs[x]
	if def != nil {
		what, mods := e.definitionFor(x, def)
		if what != "" {
			e.token(x.Pos(), len(x.String()), what, mods)
		}
		if semDebug {
			log.Printf(" for %s/%T/%T got %s %v (%s)", x.String(), def, def.Type(), what, mods, e.strStack())
		}
		return
	}
	use := e.ti.Uses[x]
	tok := func(pos token.Pos, lng int, tok tokenType, mods []string) {
		e.token(pos, lng, tok, mods)
		q := "nil"
		if use != nil {
			q = fmt.Sprintf("%T", use.Type())
		}
		if semDebug {
			log.Printf(" use %s/%T/%s got %s %v (%s)", x.String(), use, q, tok, mods, e.strStack())
		}
	}

	switch y := use.(type) {
	case nil:
		what, mods := e.unkIdent(x)
		if what != "" {
			tok(x.Pos(), len(x.String()), what, mods)
		} else if semDebug {
			// tok() wasn't called, so didn't log
			log.Printf(" nil %s/%T/nil %q %v (%s)", x.String(), use, what, mods, e.strStack())
		}
		return
	case *types.Builtin:
		tok(x.NamePos, len(x.Name), tokFunction, []string{"defaultLibrary"})
	case *types.Const:
		mods := []string{"readonly"}
		tt := y.Type()
		if _, ok := tt.(*types.Basic); ok {
			tok(x.Pos(), len(x.String()), tokVariable, mods)
			break
		}
		if ttx, ok := tt.(*types.Named); ok {
			if x.String() == "iota" {
				e.unexpected(fmt.Sprintf("iota:%T", ttx))
			}
			if _, ok := ttx.Underlying().(*types.Basic); ok {
				tok(x.Pos(), len(x.String()), tokVariable, mods)
				break
			}
			e.unexpected(fmt.Sprintf("%q/%T", x.String(), tt))
		}
		// can this happen? Don't think so
		e.unexpected(fmt.Sprintf("%s %T %#v", x.String(), tt, tt))
	case *types.Func:
		tok(x.Pos(), len(x.Name), tokFunction, nil)
	case *types.Label:
		// nothing to map it to
	case *types.Nil:
		// nil is a predeclared identifier
		tok(x.Pos(), len("nil"), tokVariable, []string{"readonly", "defaultLibrary"})
	case *types.PkgName:
		tok(x.Pos(), len(x.Name), tokNamespace, nil)
	case *types.TypeName: // could be a tokTypeParam
		var mods []string
		if _, ok := y.Type().(*types.Basic); ok {
			mods = []string{"defaultLibrary"}
		} else if _, ok := y.Type().(*typeparams.TypeParam); ok {
			tok(x.Pos(), len(x.String()), tokTypeParam, mods)
			break
		}
		tok(x.Pos(), len(x.String()), tokType, mods)
	case *types.Var:
		if isSignature(y) {
			tok(x.Pos(), len(x.Name), tokFunction, nil)
		} else if _, ok := y.Type().(*typeparams.TypeParam); ok {
			tok(x.Pos(), len(x.Name), tokTypeParam, nil)
		} else {
			tok(x.Pos(), len(x.Name), tokVariable, nil)
		}
	default:
		// can't happen
		if use == nil {
			msg := fmt.Sprintf("%#v/%#v %#v %#v", x, x.Obj, e.ti.Defs[x], e.ti.Uses[x])
			e.unexpected(msg)
		}
		if use.Type() != nil {
			e.unexpected(fmt.Sprintf("%s %T/%T,%#v", x.String(), use, use.Type(), use))
		} else {
			e.unexpected(fmt.Sprintf("%s %T", x.String(), use))
		}
	}
}

func isSignature(use types.Object) bool {
	if true {
		return false // PJW: fix after generics seem ok
	}
	if _, ok := use.(*types.Var); !ok {
		return false
	}
	v := use.Type()
	if v == nil {
		return false
	}
	if _, ok := v.(*types.Signature); ok {
		return true
	}
	return false
}

// unkIdent is used when both e.ti.Defs and e.ti.Uses are nil: it guesses from
// the parse stack. A lot of these cases only happen when the package doesn't
// compile, but in that case it is all best-effort from the parse tree anyway.
func (e *encoded) unkIdent(x *ast.Ident) (tokenType, []string) {
	def := []string{"definition"}
	n := len(e.stack) - 2 // parent of Ident
	if n < 0 {
		e.unexpected("no stack?")
		return "", nil
	}
	switch nd := e.stack[n].(type) {
	case *ast.BinaryExpr, *ast.UnaryExpr, *ast.ParenExpr, *ast.StarExpr,
		*ast.IncDecStmt, *ast.SliceExpr, *ast.ExprStmt, *ast.IndexExpr,
		*ast.ReturnStmt, *ast.ChanType, *ast.SendStmt,
		*ast.ForStmt, // possibly incomplete
		*ast.IfStmt, /* condition */
		*ast.KeyValueExpr: // either key or value
		return tokVariable, nil
	case *typeparams.IndexListExpr: // generic?
		return tokVariable, nil
	case *ast.Ellipsis:
		return tokType, nil
	case *ast.CaseClause:
		if n-2 >= 0 {
			if _, ok := e.stack[n-2].(*ast.TypeSwitchStmt); ok {
				return tokType, nil
			}
		}
		return tokVariable, nil
	case *ast.ArrayType:
		if x == nd.Len {
			// or maybe a Type Param, but we can't tell just from the parse tree
			return tokVariable, nil
		} else {
			return tokType, nil
		}
	case *ast.MapType:
		return tokType, nil
	case *ast.CallExpr:
		if x == nd.Fun {
			return tokFunction, nil
		}
		return tokVariable, nil
	case *ast.SwitchStmt:
		return tokVariable, nil
	case *ast.TypeAssertExpr:
		if x == nd.X {
			return tokVariable, nil
		} else if x == nd.Type {
			return tokType, nil
		}
	case *ast.ValueSpec:
		for _, p := range nd.Names {
			if p == x {
				return tokVariable, def
			}
		}
		for _, p := range nd.Values {
			if p == x {
				return tokVariable, nil
			}
		}
		return tokType, nil
	case *ast.SelectorExpr: // e.ti.Selections[nd] is nil, so no help
		if n-1 >= 0 {
			if ce, ok := e.stack[n-1].(*ast.CallExpr); ok {
				// ... CallExpr SelectorExpr Ident (_.x())
				if ce.Fun == nd && nd.Sel == x {
					return tokFunction, nil
				}
			}
		}
		return tokVariable, nil
	case *ast.AssignStmt:
		for _, p := range nd.Lhs {
			// x := ..., or x = ...
			if p == x {
				if nd.Tok != token.DEFINE {
					def = nil
				}
				return tokVariable, def
			}
		}
		// RHS, = x
		return tokVariable, nil
	case *ast.TypeSpec: // it's a type if it is either the Name or the Type
		if x == nd.Type {
			def = nil
		}
		return tokType, def
	case *ast.Field:
		// the ident could be a type in a field, a method in an interface type, or a variable
		if x == nd.Type {
			return tokType, nil
		}
		if n-2 >= 0 {
			_, okit := e.stack[n-2].(*ast.InterfaceType)
			_, okfl := e.stack[n-1].(*ast.FieldList)
			if okit && okfl {
				return tokMethod, def
			}
		}
		return tokVariable, nil
	case *ast.LabeledStmt, *ast.BranchStmt:
		// nothing to report
	case *ast.CompositeLit:
		if nd.Type == x {
			return tokType, nil
		}
		return tokVariable, nil
	case *ast.RangeStmt:
		if nd.Tok != token.DEFINE {
			def = nil
		}
		return tokVariable, def
	case *ast.FuncDecl:
		return tokFunction, def
	default:
		msg := fmt.Sprintf("%T unexpected: %s %s%q", nd, x.Name, e.strStack(), e.srcLine(x))
		e.unexpected(msg)
	}
	return "", nil
}

func isDeprecated(n *ast.CommentGroup) bool {
	if n == nil {
		return false
	}
	for _, c := range n.List {
		if strings.HasPrefix(c.Text, "// Deprecated") {
			return true
		}
	}
	return false
}
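
// definitionFor classifies a defining identifier by walking up the parse
// stack: variables (constants get the "readonly" modifier), functions,
// methods, receivers, parameters, types, and type parameters, adding
// "definition" and, when the doc comment says so, "deprecated".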
func (e *encoded) definitionFor(x *ast.Ident, def types.Object) (tokenType, []string) {
	// PJW: def == types.Label? probably a nothing
	// PJW: look into replacing these syntactic tests with types more generally
	mods := []string{"definition"}
	for i := len(e.stack) - 1; i >= 0; i-- {
		s := e.stack[i]
		switch y := s.(type) {
		case *ast.AssignStmt, *ast.RangeStmt:
			if x.Name == "_" {
				return "", nil // not really a variable
			}
			return tokVariable, mods
		case *ast.GenDecl:
			if isDeprecated(y.Doc) {
				mods = append(mods, "deprecated")
			}
			if y.Tok == token.CONST {
				mods = append(mods, "readonly")
			}
			return tokVariable, mods
		case *ast.FuncDecl:
			// If x is immediately under a FuncDecl, it is a function or method
			if i == len(e.stack)-2 {
				if isDeprecated(y.Doc) {
					mods = append(mods, "deprecated")
				}
				if y.Recv != nil {
					return tokMethod, mods
				}
				return tokFunction, mods
			}
			// if x < ... < FieldList < FuncDecl, this is the receiver, a variable
			if _, ok := e.stack[i+1].(*ast.FieldList); ok {
				return tokVariable, nil
			}
			// if x < ... < FieldList < FuncType < FuncDecl, this is a param
			return tokParameter, mods
		case *ast.FuncType:
			return tokParameter, mods
		case *ast.InterfaceType:
			return tokMethod, mods
		case *ast.TypeSpec:
			// GenDecl/TypeSpec/FuncType/FieldList/Field/Ident
			// (type A func(b uint64)) (err error):
			// b and err should not be tokType, but tokVariable;
			// and in GenDecl/TypeSpec/StructType/FieldList/Field/Ident
			// (type A struct{b uint64}
			// but on type B struct{C}), C is a type, but is not being defined.
			// GenDecl/TypeSpec/FieldList/Field/Ident is a typeParam
			if _, ok := e.stack[i+1].(*ast.FieldList); ok {
				return tokTypeParam, mods
			}
			fldm := e.stack[len(e.stack)-2]
			if fld, ok := fldm.(*ast.Field); ok {
				// if len(fld.Names) == 0 this is a tokType, being used
				if len(fld.Names) == 0 {
					return tokType, nil
				}
				return tokVariable, mods
			}
			return tokType, mods
		}
	}
	// can't happen
	msg := fmt.Sprintf("failed to find the decl for %s", e.pgf.Tok.PositionFor(x.Pos(), false))
	e.unexpected(msg)
	return "", []string{""}
}
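
// multiline emits one token per line for a token that spans several lines
// (raw strings and /* */ comments), since e.token refuses ranges whose start
// and end fall on different lines.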
func (e *encoded) multiline(start, end token.Pos, val string, tok tokenType) {
	f := e.fset.File(start)
	// the hard part is finding the lengths of lines. include the \n
	leng := func(line int) int {
		n := f.LineStart(line)
		if line >= f.LineCount() {
			return f.Size() - int(n)
		}
		return int(f.LineStart(line+1) - n)
	}
	spos := e.fset.PositionFor(start, false)
	epos := e.fset.PositionFor(end, false)
	sline := spos.Line
	eline := epos.Line
	// first line is from spos.Column to end
	e.token(start, leng(sline)-spos.Column, tok, nil) // leng(sline)-1 - (spos.Column-1)
	for i := sline + 1; i < eline; i++ {
		// intermediate lines are from 1 to end
		e.token(f.LineStart(i), leng(i)-1, tok, nil) // avoid the newline
	}
	// last line is from 1 to epos.Column
	e.token(f.LineStart(eline), epos.Column-1, tok, nil) // columns are 1-based
}

// findKeyword finds a keyword rather than guessing its location
func (e *encoded) findKeyword(keyword string, start, end token.Pos) token.Pos {
	offset := int(start) - e.pgf.Tok.Base()
	last := int(end) - e.pgf.Tok.Base()
	buf := e.pgf.Src
	idx := bytes.Index(buf[offset:last], []byte(keyword))
	if idx != -1 {
		return start + token.Pos(idx)
	}
	// (in unparsable programs: type _ <-<-chan int)
	e.unexpected(fmt.Sprintf("not found:%s %v", keyword, e.fset.PositionFor(start, false)))
	return token.NoPos
}

func (e *encoded) init() error {
	e.start = token.Pos(e.pgf.Tok.Base())
	e.end = e.start + token.Pos(e.pgf.Tok.Size())
	if e.rng == nil {
		return nil
	}
	span, err := e.pgf.Mapper.RangeSpan(*e.rng)
	if err != nil {
		return errors.Errorf("range span (%w) error for %s", err, e.pgf.File.Name)
	}
	e.end = e.start + token.Pos(span.End().Offset())
	e.start += token.Pos(span.Start().Offset())
	return nil
}
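
// Data sorts the collected items and packs them into the LSP integer
// encoding: five uint32s per token (line delta, start, length, token type
// index, modifier bitmask). An illustrative example (not from the original
// source; indices depend on the client's legend): a "func" keyword at
// line 3, column 0, a "main" function definition at line 3, column 5, and a
// "fmt" namespace token at line 4, column 1, encode as
//
//	3,0,4,K,0,  0,5,4,F,D,  1,1,3,N,0
//
// where K, F, N are the typeMap indices for "keyword", "function" and
// "namespace", and D is the modMap bit for "definition". The start value is
// made relative to the previous token only when the line delta is 0.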
func (e *encoded) Data() []uint32 {
	// binary operators, at least, will be out of order
	sort.Slice(e.items, func(i, j int) bool {
		if e.items[i].line != e.items[j].line {
			return e.items[i].line < e.items[j].line
		}
		return e.items[i].start < e.items[j].start
	})
	typeMap, modMap := e.maps()
	// each semantic token needs five values
	// (see Integer Encoding for Tokens in the LSP spec)
	x := make([]uint32, 5*len(e.items))
	var j int
	var last semItem
	for i := 0; i < len(e.items); i++ {
		typ, ok := typeMap[e.items[i].typeStr]
		if !ok {
			continue // client doesn't want typeStr
		}
		if j == 0 {
			x[0] = e.items[0].line
		} else {
			x[j] = e.items[i].line - last.line
		}
		x[j+1] = e.items[i].start
		if j > 0 && x[j] == 0 {
			x[j+1] = e.items[i].start - last.start
		}
		x[j+2] = e.items[i].len
		x[j+3] = uint32(typ)
		mask := 0
		for _, s := range e.items[i].mods {
			// modMap[s] is 0 if the client doesn't want this modifier
			mask |= modMap[s]
		}
		x[j+4] = uint32(mask)
		j += 5
		last = e.items[i]
	}
	return x[:j]
}

func (e *encoded) importSpec(d *ast.ImportSpec) {
	// a local package name or the last component of the Path
	if d.Name != nil {
		nm := d.Name.String()
		if nm != "_" && nm != "." {
			e.token(d.Name.Pos(), len(nm), tokNamespace, nil)
		}
		return // don't mark anything for . or _
	}
	val := d.Path.Value
	if len(val) < 2 || val[0] != '"' || val[len(val)-1] != '"' {
		// avoid panics on imports without a properly quoted string
		return
	}
	nm := val[1 : len(val)-1] // remove surrounding "s
	// Import strings are implementation defined. Try to match with parse information.
	x, err := e.pkg.GetImport(nm)
	if err != nil {
		// unexpected, but the only impact is that some import may not be colored
		return
	}
	// expect that nm is x.PkgPath and that x.Name() is a component of it
	if x.PkgPath() != nm {
		// don't know how or what to color (if this can happen at all)
		return
	}
	// this is not a precise test: imagine "github.com/nasty/v/v2"
	j := strings.LastIndex(nm, x.Name())
	if j == -1 {
		// name doesn't show up, for whatever reason, so nothing to report
		return
	}
	start := d.Path.Pos() + 1 + token.Pos(j) // skip the initial quote
	e.token(start, len(x.Name()), tokNamespace, nil)
}

// log unexpected state
func (e *encoded) unexpected(msg string) {
	if semDebug {
		panic(msg)
	}
	event.Error(e.ctx, e.strStack(), errors.New(msg))
}

// SemType returns a string equivalent of the type, for gopls semtok
func SemType(n int) string {
	tokTypes := SemanticTypes()
	tokMods := SemanticModifiers()
	if n >= 0 && n < len(tokTypes) {
		return tokTypes[n]
	}
	return fmt.Sprintf("?%d[%d,%d]?", n, len(tokTypes), len(tokMods))
}

// SemMods returns the []string equivalent of the mods, for gopls semtok.
func SemMods(n int) []string {
	tokMods := SemanticModifiers()
	mods := []string{}
	for i := 0; i < len(tokMods); i++ {
		if (n & (1 << uint(i))) != 0 {
			mods = append(mods, tokMods[i])
		}
	}
	return mods
}

func (e *encoded) maps() (map[tokenType]int, map[string]int) {
	tmap := make(map[tokenType]int)
	mmap := make(map[string]int)
	for i, t := range e.tokTypes {
		tmap[tokenType(t)] = i
	}
	for i, m := range e.tokMods {
		mmap[m] = 1 << uint(i) // go 1.12 compatibility
	}
	return tmap, mmap
}

// SemanticTypes to use in case there is no client, as in the command line, or tests
func SemanticTypes() []string {
	return semanticTypes[:]
}

// SemanticModifiers to use in case there is no client.
func SemanticModifiers() []string {
	return semanticModifiers[:]
}

var (
	semanticTypes = [...]string{
		"namespace", "type", "class", "enum", "interface",
		"struct", "typeParameter", "parameter", "variable", "property", "enumMember",
		"event", "function", "method", "macro", "keyword", "modifier", "comment",
		"string", "number", "regexp", "operator",
	}
	semanticModifiers = [...]string{
		"declaration", "definition", "readonly", "static",
		"deprecated", "abstract", "async", "modification", "documentation", "defaultLibrary",
	}
)