github.com/gwaycc/gometalinter@v3.0.0+incompatible/_linters/src/honnef.co/go/tools/lint/lint.go

// Package lint provides the foundation for tools like staticcheck
package lint // import "honnef.co/go/tools/lint"

import (
	"fmt"
	"go/ast"
	"go/token"
	"go/types"
	"io"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"sync"
	"time"
	"unicode"

	"golang.org/x/tools/go/packages"
	"honnef.co/go/tools/config"
	"honnef.co/go/tools/ssa"
	"honnef.co/go/tools/ssa/ssautil"
)

type Job struct {
	Program *Program

	checker  string
	check    Check
	problems []Problem

	duration time.Duration
}

type Ignore interface {
	Match(p Problem) bool
}

type LineIgnore struct {
	File    string
	Line    int
	Checks  []string
	matched bool
	pos     token.Pos
}

func (li *LineIgnore) Match(p Problem) bool {
	if p.Position.Filename != li.File || p.Position.Line != li.Line {
		return false
	}
	for _, c := range li.Checks {
		if m, _ := filepath.Match(c, p.Check); m {
			li.matched = true
			return true
		}
	}
	return false
}

func (li *LineIgnore) String() string {
	matched := "not matched"
	if li.matched {
		matched = "matched"
	}
	return fmt.Sprintf("%s:%d %s (%s)", li.File, li.Line, strings.Join(li.Checks, ", "), matched)
}

type FileIgnore struct {
	File   string
	Checks []string
}

func (fi *FileIgnore) Match(p Problem) bool {
	if p.Position.Filename != fi.File {
		return false
	}
	for _, c := range fi.Checks {
		if m, _ := filepath.Match(c, p.Check); m {
			return true
		}
	}
	return false
}

type GlobIgnore struct {
	Pattern string
	Checks  []string
}

func (gi *GlobIgnore) Match(p Problem) bool {
	if gi.Pattern != "*" {
		pkgpath := p.Package.Types.Path()
		if strings.HasSuffix(pkgpath, "_test") {
			pkgpath = pkgpath[:len(pkgpath)-len("_test")]
		}
		name := filepath.Join(pkgpath, filepath.Base(p.Position.Filename))
		if m, _ := filepath.Match(gi.Pattern, name); !m {
			return false
		}
	}
	for _, c := range gi.Checks {
		if m, _ := filepath.Match(c, p.Check); m {
			return true
		}
	}
	return false
}

type Program struct {
	SSA              *ssa.Program
	InitialPackages  []*Pkg
	InitialFunctions []*ssa.Function
	AllPackages      []*packages.Package
	AllFunctions     []*ssa.Function
	Files            []*ast.File
	GoVersion        int

	tokenFileMap map[*token.File]*ast.File
	astFileMap   map[*ast.File]*Pkg
	packagesMap  map[string]*packages.Package

	genMu        sync.RWMutex
	generatedMap map[string]bool
}

func (prog *Program) Fset() *token.FileSet {
	return prog.InitialPackages[0].Fset
}

type Func func(*Job)

type Severity uint8

const (
	Error Severity = iota
	Warning
	Ignored
)

// Problem represents a problem in some source code.
type Problem struct {
	Position token.Position // position in source file
	Text     string         // the prose that describes the problem
	Check    string
	Checker  string
	Package  *Pkg
	Severity Severity
}

func (p *Problem) String() string {
	if p.Check == "" {
		return p.Text
	}
	return fmt.Sprintf("%s (%s)", p.Text, p.Check)
}

type Checker interface {
	Name() string
	Prefix() string
	Init(*Program)
	Checks() []Check
}

type Check struct {
	Fn              Func
	ID              string
	FilterGenerated bool
}
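
// A minimal sketch of a Checker implementation (the type name exampleChecker
// and the check ID "EX1000" are hypothetical and not part of any real
// checker): a checker reports its name and check-ID prefix, may precompute
// state in Init, and exposes its checks as Check values whose Fn receives a
// *Job.
type exampleChecker struct{}

func (exampleChecker) Name() string   { return "example" }
func (exampleChecker) Prefix() string { return "EX" }
func (exampleChecker) Init(*Program)  {}
func (exampleChecker) Checks() []Check {
	return []Check{{
		ID:              "EX1000",
		FilterGenerated: true,
		Fn:              func(j *Job) { /* inspect j.Program and report via j.Errorf */ },
	}}
}

// Compile-time assertion that the sketch satisfies the Checker interface.
var _ Checker = exampleChecker{}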

// A Linter lints Go source code.
type Linter struct {
	Checkers      []Checker
	Ignores       []Ignore
	GoVersion     int
	ReturnIgnored bool
	Config        config.Config

	MaxConcurrentJobs int
	PrintStats        bool

	automaticIgnores []Ignore
}

func (l *Linter) ignore(p Problem) bool {
	ignored := false
	for _, ig := range l.automaticIgnores {
		// We cannot short-circuit these, as we want to record, for
		// each ignore, whether it matched or not.
		if ig.Match(p) {
			ignored = true
		}
	}
	if ignored {
		// no need to execute other ignores if we've already had a
		// match.
		return true
	}
	for _, ig := range l.Ignores {
		// We can short-circuit here, as we aren't tracking any
		// information.
		if ig.Match(p) {
			return true
		}
	}

	return false
}

func (prog *Program) File(node Positioner) *ast.File {
	return prog.tokenFileMap[prog.SSA.Fset.File(node.Pos())]
}

func (j *Job) File(node Positioner) *ast.File {
	return j.Program.File(node)
}

func parseDirective(s string) (cmd string, args []string) {
	if !strings.HasPrefix(s, "//lint:") {
		return "", nil
	}
	s = strings.TrimPrefix(s, "//lint:")
	fields := strings.Split(s, " ")
	return fields[0], fields[1:]
}
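
// For example (hypothetical directive text), parseDirective applied to
//
//	//lint:ignore SA4006 this value is used by cgo
//
// returns cmd == "ignore" and args == []string{"SA4006", "this", "value",
// "is", "used", "by", "cgo"}: the input is split on single spaces, so the
// free-form reason arrives as individual words after the check list.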

type PerfStats struct {
	PackageLoading time.Duration
	SSABuild       time.Duration
	OtherInitWork  time.Duration
	CheckerInits   map[string]time.Duration
	Jobs           []JobStat
}

type JobStat struct {
	Job      string
	Duration time.Duration
}

func (stats *PerfStats) Print(w io.Writer) {
	fmt.Fprintln(w, "Package loading:", stats.PackageLoading)
	fmt.Fprintln(w, "SSA build:", stats.SSABuild)
	fmt.Fprintln(w, "Other init work:", stats.OtherInitWork)

	fmt.Fprintln(w, "Checker inits:")
	for checker, d := range stats.CheckerInits {
		fmt.Fprintf(w, "\t%s: %s\n", checker, d)
	}
	fmt.Fprintln(w)

	fmt.Fprintln(w, "Jobs:")
	sort.Slice(stats.Jobs, func(i, j int) bool {
		return stats.Jobs[i].Duration < stats.Jobs[j].Duration
	})
	var total time.Duration
	for _, job := range stats.Jobs {
		fmt.Fprintf(w, "\t%s: %s\n", job.Job, job.Duration)
		total += job.Duration
	}
	fmt.Fprintf(w, "\tTotal: %s\n", total)
}

func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem {
	allPkgs := allPackages(initial)
	t := time.Now()
	ssaprog, _ := ssautil.Packages(allPkgs, ssa.GlobalDebug)
	ssaprog.Build()
	if stats != nil {
		stats.SSABuild = time.Since(t)
	}

	t = time.Now()
	pkgMap := map[*ssa.Package]*Pkg{}
	var pkgs []*Pkg
	for _, pkg := range initial {
		ssapkg := ssaprog.Package(pkg.Types)
		var cfg config.Config
		if len(pkg.GoFiles) != 0 {
			path := pkg.GoFiles[0]
			dir := filepath.Dir(path)
			var err error
			// OPT(dh): we're rebuilding the entire config tree for
			// each package. for example, if we check a/b/c and
			// a/b/c/d, we'll process a, a/b, a/b/c, a, a/b, a/b/c,
			// a/b/c/d – we should cache configs per package and only
			// load the new levels.
			cfg, err = config.Load(dir)
			if err != nil {
				// FIXME(dh): we couldn't load the config, what are we
				// supposed to do? probably tell the user somehow
			}
			cfg = cfg.Merge(l.Config)
		}

		pkg := &Pkg{
			SSA:     ssapkg,
			Package: pkg,
			Config:  cfg,
		}
		pkgMap[ssapkg] = pkg
		pkgs = append(pkgs, pkg)
	}

	prog := &Program{
		SSA:             ssaprog,
		InitialPackages: pkgs,
		AllPackages:     allPkgs,
		GoVersion:       l.GoVersion,
		tokenFileMap:    map[*token.File]*ast.File{},
		astFileMap:      map[*ast.File]*Pkg{},
		generatedMap:    map[string]bool{},
	}
	prog.packagesMap = map[string]*packages.Package{}
	for _, pkg := range allPkgs {
		prog.packagesMap[pkg.Types.Path()] = pkg
	}

	isInitial := map[*types.Package]struct{}{}
	for _, pkg := range pkgs {
		isInitial[pkg.Types] = struct{}{}
	}
	for fn := range ssautil.AllFunctions(ssaprog) {
		if fn.Pkg == nil {
			continue
		}
		prog.AllFunctions = append(prog.AllFunctions, fn)
		if _, ok := isInitial[fn.Pkg.Pkg]; ok {
			prog.InitialFunctions = append(prog.InitialFunctions, fn)
		}
	}
	for _, pkg := range pkgs {
		prog.Files = append(prog.Files, pkg.Syntax...)

		ssapkg := ssaprog.Package(pkg.Types)
		for _, f := range pkg.Syntax {
			prog.astFileMap[f] = pkgMap[ssapkg]
		}
	}

	for _, pkg := range allPkgs {
		for _, f := range pkg.Syntax {
			tf := pkg.Fset.File(f.Pos())
			prog.tokenFileMap[tf] = f
		}
	}

	var out []Problem
	l.automaticIgnores = nil
	for _, pkg := range initial {
		for _, f := range pkg.Syntax {
			cm := ast.NewCommentMap(pkg.Fset, f, f.Comments)
			for node, cgs := range cm {
				for _, cg := range cgs {
					for _, c := range cg.List {
						if !strings.HasPrefix(c.Text, "//lint:") {
							continue
						}
						cmd, args := parseDirective(c.Text)
						switch cmd {
						case "ignore", "file-ignore":
							if len(args) < 2 {
								// FIXME(dh): this causes duplicated warnings when using megacheck
								p := Problem{
									Position: prog.DisplayPosition(c.Pos()),
									Text:     "malformed linter directive; missing the required reason field?",
									Check:    "",
									Checker:  "lint",
									Package:  nil,
								}
								out = append(out, p)
								continue
							}
						default:
							// unknown directive, ignore
							continue
						}
						checks := strings.Split(args[0], ",")
						pos := prog.DisplayPosition(node.Pos())
						var ig Ignore
						switch cmd {
						case "ignore":
							ig = &LineIgnore{
								File:   pos.Filename,
								Line:   pos.Line,
								Checks: checks,
								pos:    c.Pos(),
							}
						case "file-ignore":
							ig = &FileIgnore{
								File:   pos.Filename,
								Checks: checks,
							}
						}
						l.automaticIgnores = append(l.automaticIgnores, ig)
					}
				}
			}
		}
	}
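
	// To illustrate the two directive forms handled above (hypothetical
	// check IDs and reasons):
	//
	//	//lint:ignore SA4006,S1021 needed for the degenerate case
	//	//lint:file-ignore SA1019 this file deliberately wraps a deprecated API
	//
	// The first form yields a LineIgnore for the file and line of the node
	// the comment is associated with; the second yields a FileIgnore for the
	// whole file. The check list is comma-separated and may use
	// filepath.Match patterns, and the trailing reason is mandatory.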

	sizes := struct {
		types      int
		defs       int
		uses       int
		implicits  int
		selections int
		scopes     int
	}{}
	for _, pkg := range pkgs {
		sizes.types += len(pkg.TypesInfo.Types)
		sizes.defs += len(pkg.TypesInfo.Defs)
		sizes.uses += len(pkg.TypesInfo.Uses)
		sizes.implicits += len(pkg.TypesInfo.Implicits)
		sizes.selections += len(pkg.TypesInfo.Selections)
		sizes.scopes += len(pkg.TypesInfo.Scopes)
	}

	if stats != nil {
		stats.OtherInitWork = time.Since(t)
	}

	for _, checker := range l.Checkers {
		t := time.Now()
		checker.Init(prog)
		if stats != nil {
			stats.CheckerInits[checker.Name()] = time.Since(t)
		}
	}

	var jobs []*Job
	var allChecks []string

	for _, checker := range l.Checkers {
		checks := checker.Checks()
		for _, check := range checks {
			allChecks = append(allChecks, check.ID)
			j := &Job{
				Program: prog,
				checker: checker.Name(),
				check:   check,
			}
			jobs = append(jobs, j)
		}
	}

	max := len(jobs)
	if l.MaxConcurrentJobs > 0 {
		max = l.MaxConcurrentJobs
	}

	sem := make(chan struct{}, max)
	wg := &sync.WaitGroup{}
	for _, j := range jobs {
		wg.Add(1)
		go func(j *Job) {
			defer wg.Done()
			sem <- struct{}{}
			defer func() { <-sem }()
			fn := j.check.Fn
			if fn == nil {
				return
			}
			t := time.Now()
			fn(j)
			j.duration = time.Since(t)
		}(j)
	}
	wg.Wait()

	for _, j := range jobs {
		if stats != nil {
			stats.Jobs = append(stats.Jobs, JobStat{j.check.ID, j.duration})
		}
		for _, p := range j.problems {
			allowedChecks := FilterChecks(allChecks, p.Package.Config.Checks)

			if l.ignore(p) {
				p.Severity = Ignored
			}
			// TODO(dh): support globs in check white/blacklist
			// OPT(dh): this approach doesn't actually disable checks,
			// it just discards their results. For the moment, that's
			// fine. None of our checks are super expensive. In the
			// future, we may want to provide opt-in expensive
			// analysis, which shouldn't run at all. It may be easiest
			// to implement this in the individual checks.
			if (l.ReturnIgnored || p.Severity != Ignored) && allowedChecks[p.Check] {
				out = append(out, p)
			}
		}
	}
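
	// A //lint:ignore directive that never matched any problem is itself
	// reported below, unless every check it names has been disabled for the
	// package that contains it.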
	for _, ig := range l.automaticIgnores {
		ig, ok := ig.(*LineIgnore)
		if !ok {
			continue
		}
		if ig.matched {
			continue
		}

		couldveMatched := false
		for f, pkg := range prog.astFileMap {
			if prog.Fset().Position(f.Pos()).Filename != ig.File {
				continue
			}
			allowedChecks := FilterChecks(allChecks, pkg.Config.Checks)
			for _, c := range ig.Checks {
				if !allowedChecks[c] {
					continue
				}
				couldveMatched = true
				break
			}
			break
		}

		if !couldveMatched {
			// The ignored checks were disabled for the containing package.
			// Don't flag the ignore for not having matched.
			continue
		}
		p := Problem{
			Position: prog.DisplayPosition(ig.pos),
			Text:     "this linter directive didn't match anything; should it be removed?",
			Check:    "",
			Checker:  "lint",
			Package:  nil,
		}
		out = append(out, p)
	}

	sort.Slice(out, func(i int, j int) bool {
		pi, pj := out[i].Position, out[j].Position

		if pi.Filename != pj.Filename {
			return pi.Filename < pj.Filename
		}
		if pi.Line != pj.Line {
			return pi.Line < pj.Line
		}
		if pi.Column != pj.Column {
			return pi.Column < pj.Column
		}

		return out[i].Text < out[j].Text
	})

	if l.PrintStats && stats != nil {
		stats.Print(os.Stderr)
	}

	if len(out) < 2 {
		return out
	}

	uniq := make([]Problem, 0, len(out))
	uniq = append(uniq, out[0])
	prev := out[0]
	for _, p := range out[1:] {
		if prev.Position == p.Position && prev.Text == p.Text {
			continue
		}
		prev = p
		uniq = append(uniq, p)
	}

	return uniq
}

func FilterChecks(allChecks []string, checks []string) map[string]bool {
	// OPT(dh): this entire computation could be cached per package
	allowedChecks := map[string]bool{}

	for _, check := range checks {
		b := true
		if len(check) > 1 && check[0] == '-' {
			b = false
			check = check[1:]
		}
		if check == "*" || check == "all" {
			// Match all
			for _, c := range allChecks {
				allowedChecks[c] = b
			}
		} else if strings.HasSuffix(check, "*") {
			// Glob
			prefix := check[:len(check)-1]
			isCat := strings.IndexFunc(prefix, func(r rune) bool { return unicode.IsNumber(r) }) == -1

			for _, c := range allChecks {
				idx := strings.IndexFunc(c, func(r rune) bool { return unicode.IsNumber(r) })
				if isCat {
					// Glob is S*, which should match S1000 but not SA1000
					cat := c[:idx]
					if prefix == cat {
						allowedChecks[c] = b
					}
				} else {
					// Glob is S1*
					if strings.HasPrefix(c, prefix) {
						allowedChecks[c] = b
					}
				}
			}
		} else {
			// Literal check name
			allowedChecks[check] = b
		}
	}
	return allowedChecks
}
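
// Worked example for FilterChecks above (hypothetical check IDs): with
//
//	allChecks = []string{"S1000", "SA1000", "SA4006", "ST1000"}
//	checks    = []string{"all", "-SA*", "SA4006"}
//
// the result is
//
//	map[string]bool{"S1000": true, "SA1000": false, "SA4006": true, "ST1000": true}
//
// "all" enables every known check, the category glob "-SA*" then disables
// the SA checks (but not S1000, whose category is "S"), and the literal
// "SA4006" re-enables that single check. Later entries win because they
// simply overwrite earlier map entries.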

func (prog *Program) Package(path string) *packages.Package {
	return prog.packagesMap[path]
}

// Pkg represents a package being linted.
type Pkg struct {
	SSA *ssa.Package
	*packages.Package
	Config config.Config
}

type Positioner interface {
	Pos() token.Pos
}

func (prog *Program) DisplayPosition(p token.Pos) token.Position {
	// Only use the adjusted position if it points to another Go file.
	// This means we'll point to the original file for cgo files, but
	// we won't point to a YACC grammar file.

	pos := prog.Fset().PositionFor(p, false)
	adjPos := prog.Fset().PositionFor(p, true)

	if filepath.Ext(adjPos.Filename) == ".go" {
		return adjPos
	}
	return pos
}

func (prog *Program) isGenerated(path string) bool {
	// This function isn't very efficient in terms of lock contention
	// and lack of parallelism, but it really shouldn't matter.
	// Projects consist of thousands of files and have hundreds of
	// errors. That's not a lot of calls to isGenerated.

	prog.genMu.RLock()
	if b, ok := prog.generatedMap[path]; ok {
		prog.genMu.RUnlock()
		return b
	}
	prog.genMu.RUnlock()
	prog.genMu.Lock()
	defer prog.genMu.Unlock()
	// recheck to avoid doing extra work in case of race
	if b, ok := prog.generatedMap[path]; ok {
		return b
	}

	f, err := os.Open(path)
	if err != nil {
		return false
	}
	defer f.Close()
	b := isGenerated(f)
	prog.generatedMap[path] = b
	return b
}

func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem {
	tf := j.Program.SSA.Fset.File(n.Pos())
	f := j.Program.tokenFileMap[tf]
	pkg := j.Program.astFileMap[f]

	pos := j.Program.DisplayPosition(n.Pos())
	if j.Program.isGenerated(pos.Filename) && j.check.FilterGenerated {
		return nil
	}
	problem := Problem{
		Position: pos,
		Text:     fmt.Sprintf(format, args...),
		Check:    j.check.ID,
		Checker:  j.checker,
		Package:  pkg,
	}
	j.problems = append(j.problems, problem)
	return &j.problems[len(j.problems)-1]
}

func (j *Job) NodePackage(node Positioner) *Pkg {
	f := j.File(node)
	return j.Program.astFileMap[f]
}

func allPackages(pkgs []*packages.Package) []*packages.Package {
	var out []*packages.Package
	packages.Visit(
		pkgs,
		func(pkg *packages.Package) bool {
			out = append(out, pkg)
			return true
		},
		nil,
	)
	return out
}
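
// A minimal usage sketch (the helper name exampleLintPackages is
// hypothetical and not part of this package's API): load packages with full
// syntax and type information, run a Linter over them, and print the
// resulting problems. Callers that want timing data would pass a *PerfStats
// with CheckerInits initialized instead of nil.
func exampleLintPackages(checkers []Checker, patterns ...string) error {
	cfg := &packages.Config{
		Mode:  packages.LoadAllSyntax,
		Tests: true,
	}
	pkgs, err := packages.Load(cfg, patterns...)
	if err != nil {
		return err
	}
	l := &Linter{
		Checkers:  checkers,
		GoVersion: 11, // e.g. target Go 1.11; an assumption for this sketch
	}
	for _, p := range l.Lint(pkgs, nil) {
		fmt.Fprintf(os.Stderr, "%s: %s\n", p.Position, p.String())
	}
	return nil
}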