modernc.org/gc@v1.0.1-0.20240304020402-f0dba7c97c2b/all_test.go (about)

     1  // Copyright 2016 The GC Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package gc // import "modernc.org/gc"
     6  
     7  import (
     8  	"bufio"
     9  	"bytes"
    10  	"flag"
    11  	"fmt"
    12  	"go/ast"
    13  	"go/build"
    14  	"go/importer"
    15  	goparser "go/parser"
    16  	"go/scanner"
    17  	gotoken "go/token"
    18  	"go/types"
    19  	"io/ioutil"
    20  	"os"
    21  	"path"
    22  	"path/filepath"
    23  	"reflect"
    24  	"regexp"
    25  	"runtime"
    26  	"runtime/debug"
    27  	"sort"
    28  	"strings"
    29  	"sync"
    30  	"sync/atomic"
    31  	"testing"
    32  
    33  	"github.com/edsrzf/mmap-go"
    34  	"modernc.org/lex"
    35  	dfa "modernc.org/lexer"
    36  	"modernc.org/mathutil"
    37  	"modernc.org/sortutil"
    38  	"modernc.org/strutil"
    39  	"modernc.org/token"
    40  	"modernc.org/y"
    41  )
    42  
    43  func caller(s string, va ...interface{}) {
    44  	if s == "" {
    45  		s = strings.Repeat("%v ", len(va))
    46  	}
    47  	_, fn, fl, _ := runtime.Caller(2)
    48  	fmt.Fprintf(os.Stderr, "// caller: %s:%d: ", path.Base(fn), fl)
    49  	fmt.Fprintf(os.Stderr, s, va...)
    50  	fmt.Fprintln(os.Stderr)
    51  	_, fn, fl, _ = runtime.Caller(1)
    52  	fmt.Fprintf(os.Stderr, "// \tcallee: %s:%d: ", path.Base(fn), fl)
    53  	fmt.Fprintln(os.Stderr)
    54  	os.Stderr.Sync()
    55  }
    56  
    57  var dbgMu sync.Mutex
    58  
    59  func dbg(s string, va ...interface{}) {
    60  	dbgMu.Lock()
    61  	defer dbgMu.Unlock()
    62  
    63  	if s == "" {
    64  		s = strings.Repeat("%v ", len(va))
    65  	}
    66  	_, fn, fl, _ := runtime.Caller(1)
    67  	fmt.Fprintf(os.Stderr, "// dbg %s:%d: ", path.Base(fn), fl)
    68  	fmt.Fprintf(os.Stderr, s, va...)
    69  	fmt.Fprintln(os.Stderr)
    70  	os.Stderr.Sync()
    71  }
    72  
    73  func TODO(s string, args ...interface{}) string { //TODOOK
    74  	_, fn, fl, _ := runtime.Caller(1)
    75  	return fmt.Sprintf("// XTODO: %s:%d: %v\n", path.Base(fn), fl, fmt.Sprintf(s, args...)) //TODOOK
    76  }
    77  
// stack returns the current goroutine's stack trace as formatted by
// runtime/debug.Stack.
func stack() []byte { return debug.Stack() }

// use is a no-op sink that marks its arguments as referenced, keeping
// debug-only helpers from tripping "declared and not used" complaints.
func use(...interface{}) {}
    81  
// init computes selfImportPath, the import path of this package, by
// making this source file's path relative to some $GOPATH/src entry.
// It panics when no GOPATH entry matches.
func init() {
	use(caller, dbg, TODO, (*parser).todo, stack) //TODOOK
	_, file, _, ok := runtime.Caller(0)
	if !ok {
		panic("internal error 001: cannot determine self import path")
	}

	gopaths := filepath.SplitList(os.Getenv("GOPATH"))
	for _, v := range gopaths {
		gp := filepath.Join(v, "src")
		// NOTE: `path` here shadows the imported "path" package within
		// this loop body.
		path, err := filepath.Rel(gp, file)
		if err != nil {
			continue
		}

		// NOTE(review): filepath.Rel can also succeed with a "../"-style
		// result for files outside gp; this assumes the first successful
		// entry is the right one — confirm GOPATH ordering is intended.
		selfImportPath = filepath.Dir(path)
		return
	}

	panic("internal error 002: cannot determine self import path")
}
   103  
// printHooks and printHooks2 customize strutil pretty-printing of AST
// values in test output.
var (
	// printHooks renders a Token as its full position and quoted value.
	printHooks = strutil.PrettyPrintHooks{
		reflect.TypeOf(Token{}): func(f strutil.Formatter, v interface{}, prefix, suffix string) {
			t := v.(Token)
			f.Format(prefix)
			f.Format("%s: %q", t.Position(), t.Val)
			f.Format(suffix)
		},
	}

	// printHooks2 is like printHooks but shortens position filenames to
	// their base name (dropping a bare "."), and additionally handles
	// Ident, TypeKind and ChanDir values.
	printHooks2 = strutil.PrettyPrintHooks{
		reflect.TypeOf(Token{}): func(f strutil.Formatter, v interface{}, prefix, suffix string) {
			t := v.(Token)
			f.Format(prefix)
			pos := t.Position()
			pos.Filename = filepath.Base(pos.Filename)
			if pos.Filename == "." {
				pos.Filename = ""
			}
			f.Format("%s: %q", pos, t.Val)
			f.Format(suffix)
		},

		reflect.TypeOf(Ident{}): func(f strutil.Formatter, v interface{}, prefix, suffix string) {
			t := v.(Ident).Token
			f.Format(prefix)
			pos := t.Position()
			pos.Filename = filepath.Base(pos.Filename)
			if pos.Filename == "." {
				pos.Filename = ""
			}
			f.Format("%s: %q", pos, t.Val)
			f.Format(suffix)
		},
		reflect.TypeOf(TypeKind(0)): func(f strutil.Formatter, v interface{}, prefix, suffix string) {
			t := v.(TypeKind)
			f.Format(prefix)
			f.Format("%v", t)
			f.Format(suffix)
		},
		reflect.TypeOf(ChanDir(0)): func(f strutil.Formatter, v interface{}, prefix, suffix string) {
			t := v.(ChanDir)
			f.Format(prefix)
			f.Format("%v", t)
			f.Format(suffix)
		},
	}
)
   152  
// pretty renders v with the full-position print hooks.
func pretty(v interface{}) string  { return strutil.PrettyString(v, "", "", printHooks) }

// pretty2 renders v with the hooks that shorten position filenames.
func pretty2(v interface{}) string { return strutil.PrettyString(v, "", "", printHooks2) }
   155  
   156  // ============================================================================
   157  
// Test fixture paths: the scanner's lex definition and the yacc grammar
// (plus its coverage helper) used to build the reference parser.
const (
	lexFile   = "testdata/scanner/scanner.l"
	yaccCover = "testdata/parser/ycover.go"
	yaccFile  = "testdata/parser/parser.y"
)
   163  
// yParser bundles the y.Parser built from yaccFile with per-state report
// text, the grammar's terminal symbols, and a go/token-to-grammar-symbol
// mapping.
type yParser struct {
	*y.Parser
	reports   [][]byte // per-state excerpt of the y.output report
	terminals []*y.Symbol // all terminal symbols of the grammar
	tok2sym   map[gotoken.Token]*y.Symbol // go/token token -> grammar symbol
}
   170  
var (
	// Test flags. The first two are registered only so the flag package
	// accepts them; their values are read back from os.Args in yp0 below.
	_         = flag.Bool("closures", false, "closures")        //TODOOK
	_         = flag.String("out", "", "where to put y.output") //TODOOK
	oN        = flag.Int("N", -1, "")
	oNoErrchk = flag.Bool("noerrchk", false, "")
	oRE       = flag.String("re", "", "regexp")
	oTrc      = flag.Bool("trc", false, "trace")

	re *regexp.Regexp

	// selfImportPath is set by init above.
	selfImportPath string

	// Markers recognized in errchk test sources. (errCheckDisbled is a
	// long-standing typo for "disabled", kept as-is because it is
	// referenced elsewhere.)
	errCheckDisbled     = []byte("////")
	errCheckMark1       = []byte("// ERROR ")
	errCheckMark2       = []byte("// GC_ERROR ")
	errCheckPatterns    = regexp.MustCompile(`"([^"]*)"`)
	generalCommentEnd   = []byte("*/")
	generalCommentStart = []byte("/*")

	// lexL is the scanner DFA loaded from lexFile; any failure aborts the
	// test binary.
	lexL = func() *lex.L {
		lf, err := os.Open(lexFile)
		if err != nil {
			panic(err)
		}

		defer lf.Close()

		l, err := lex.NewL(lexFile, bufio.NewReader(lf), false, false)
		if err != nil {
			panic(err)
		}

		return l
	}()

	// yp0 builds the reference parser from the yacc grammar, slices the
	// y.output report into per-state chunks and maps go/token tokens to
	// grammar symbols.
	yp0 = func() *yParser {
		// Scan os.Args by hand because flag.Parse has not run yet at
		// package-variable initialization time.
		var closures bool
		var fn string
		for i, v := range os.Args {
			if i == 0 {
				continue
			}

			switch v {
			case "-closures":
				closures = true
			case "-out":
				// NOTE(review): indexes past the end of os.Args when
				// -out is the last argument — confirm intended.
				fn = os.Args[i+1]
			}
		}
		fset := gotoken.NewFileSet()
		var out bytes.Buffer
		p, err := y.ProcessFile(fset, yaccFile, &y.Options{
			Closures:  closures,
			Reducible: true,
			Report:    &out,
		})
		// Write the report even when grammar processing failed, so it can
		// be inspected.
		if fn != "" {
			if err := ioutil.WriteFile(fn, out.Bytes(), 0644); err != nil {
				panic(err)
			}
		}

		if err != nil {
			panic(err)
		}

		// Split the report on "\ns" (the start of each "\nstate " header)
		// into one chunk per parser state.
		reports := make([][]byte, len(p.States))
		rep := out.Bytes()
		sep := []byte("\ns") // "\nstate "
		s := 0
		for i := range reports {
			e := bytes.Index(rep[s:], sep)
			if e < 0 {
				e = len(rep[s:]) - 1
			}
			reports[i] = rep[s : s+e]
			s = s + e + 1
		}

		// Map go/token tokens to the grammar's terminal symbols, matching
		// either by the symbol name itself or by its quoted literal.
		m := make(map[gotoken.Token]*y.Symbol, len(p.Syms))
		for k, v := range p.Syms {
			if !v.IsTerminal || k == "BODY" || k[0] == '_' {
				continue
			}

			switch {
			case k[0] >= 'A' && k[0] <= 'Z':
				if tok, ok := str2token[k]; ok {
					m[tok] = v
					break
				}

				l := v.LiteralString
				if l == "" {
					panic(fmt.Errorf("no token for %q", k))
				}

				// Strip the surrounding quotes of the literal string.
				if tok, ok := str2token[l[1:len(l)-1]]; ok {
					m[tok] = v
					break
				}

				panic(k)
			case k[0] == '\'':
				// Single-character terminal like '+'.
				tok := str2token[k[1:2]]
				m[tok] = v
			default:
			}
		}
		m[gotoken.EOF] = p.Syms["$end"]
		m[tokenBODY] = p.Syms["BODY"]
		var t []*y.Symbol
		for _, v := range p.Syms {
			if v.IsTerminal {
				t = append(t, v)
			}
		}
		return &yParser{
			Parser:    p,
			reports:   reports,
			terminals: t,
			tok2sym:   m,
		}
	}()

	// str2token maps the upper-cased go/token name (e.g. "IDENT") to the
	// token value, plus the project-specific «/» tokens.
	str2token = func() map[string]gotoken.Token {
		m := map[string]gotoken.Token{}
		for i := gotoken.IDENT; i <= maxTokenToken; i++ {
			s := strings.ToUpper(i.String())
			if _, ok := m[s]; ok {
				panic(fmt.Errorf("internal error 003: %q", s))
			}

			m[s] = i
		}
		m["ILLEGAL"] = gotoken.ILLEGAL
		m["«"] = tokenLTLT
		m["»"] = tokenGTGT
		return m
	}()

	// gorootPackages collects a *Package per GOROOT/src directory,
	// populated as a side effect of building gorootFiles.
	gorootPackages []*Package

	// gorootFiles lists all .go files (including test files) under
	// $GOROOT/src, skipping dot-, underscore- and testdata directories.
	gorootFiles = func() (r []string) {
		if err := filepath.Walk(filepath.Join(runtime.GOROOT(), "src"), func(path string, info os.FileInfo, err error) error {
			if err != nil {
				return err
			}

			if !info.IsDir() {
				return nil
			}

			if base := filepath.Base(path); strings.HasPrefix(base, ".") ||
				strings.HasPrefix(base, "_") ||
				base == "testdata" {
				return filepath.SkipDir
			}

			p, err := build.ImportDir(path, 0)
			if err != nil {
				if _, ok := err.(*build.NoGoError); ok {
					return nil
				}

				return err
			}

			if len(p.GoFiles) == 0 && strings.Contains(p.ImportPath, "/internal/") { // ImportDir does not return the file list for internal packages
				return nil
			}

			ctx := &Context{}
			pkg := newPackage(ctx, p.ImportPath, p.Name, nil)
			for _, v := range p.GoFiles {
				path := filepath.Join(p.Dir, v)
				r = append(r, path)
				pkg.SourceFiles = append(pkg.SourceFiles, newSourceFile(pkg, path, nil, nil))
			}
			gorootPackages = append(gorootPackages, pkg)
			for _, v := range p.TestGoFiles {
				r = append(r, filepath.Join(p.Dir, v))
			}
			return nil
		}); err != nil {
			panic(err)
		}

		// Full-slice expressions pin capacity so later appends cannot
		// alias these shared slices.
		gorootPackages = gorootPackages[:len(gorootPackages):len(gorootPackages)]
		return r[:len(r):len(r)]
	}()

	// errchkFiles lists every .go file under $GOROOT/test (the errchk
	// corpus), skipping dot files.
	errchkFiles = func() (r []string) {
		if err := filepath.Walk(filepath.Join(runtime.GOROOT(), "test"), func(path string, info os.FileInfo, err error) error {
			if err != nil {
				return err
			}

			if info.IsDir() {
				return nil
			}

			if base := filepath.Base(path); strings.HasPrefix(base, ".") ||
				!strings.HasSuffix(base, ".go") {
				return nil
			}

			r = append(r, path)
			return nil
		}); err != nil {
			panic(err)
		}

		return r[:len(r):len(r)]
	}()

	// stdLibPackages is the subset of gorootPackages outside GOROOT/src/cmd,
	// populated as a side effect of building stdLibFiles.
	stdLibPackages []*Package

	// stdLibFiles is the subset of gorootFiles outside GOROOT/src/cmd and
	// excluding _test.go files.
	stdLibFiles = func() (r []string) {
		cmd := filepath.Join(runtime.GOROOT(), "src", "cmd")
		for _, v := range gorootFiles {
			if !strings.Contains(v, cmd) && !strings.HasSuffix(v, "_test.go") {
				r = append(r, v)
			}
		}
		for _, v := range gorootPackages {
			if len(v.SourceFiles) == 0 { //TODO-
				panic(fmt.Errorf("%+v", v))
			}
			if !strings.HasPrefix(v.SourceFiles[0].Path, cmd) {
				stdLibPackages = append(stdLibPackages, v)
			}
		}
		stdLibPackages = stdLibPackages[:len(stdLibPackages):len(stdLibPackages)]
		return r[:len(r):len(r)]
	}()
)
   409  
   410  func errString(err error) string {
   411  	var b bytes.Buffer
   412  	scanner.PrintError(&b, err)
   413  	return strings.TrimSpace(b.String())
   414  }
   415  
// testScannerStates walks the scanner DFA loaded from the .l definition,
// synthesizing for every reachable state a source string that drives the
// lexer into it, and cross-checks the project lexer against go/scanner on
// positions, tokens, literals and error counts for each synthesized
// input. It reports state coverage and aborts after ten mismatches.
func testScannerStates(t *testing.T) {
	mn := len(lexL.Dfa) // states not yet visited; decremented per state
	mn0 := mn
	m := make([]bool, mn+1) // 1-based state.Index.
	fset2 := gotoken.NewFileSet()
	var ss scanner.Scanner
	l := newLexer(nil, nil)
	nerr := 0

	var cases, sum int
	var f func(string, *dfa.NfaState)
	f = func(prefix string, s *dfa.NfaState) {
		if nerr >= 10 {
			return
		}

		if m[s.Index] {
			return
		}

		m[s.Index] = true

		// The DFA is expected to be fully determinized: no epsilon
		// (non-consuming) edges.
		if len(s.NonConsuming) != 0 {
			panic("internal error 004")
		}

		// Build the successor table: for each character class < classNext,
		// the target state, verifying edges never overlap.
		next := make([]*dfa.NfaState, classNext)
		for _, e := range s.Consuming {
			switch x := e.(type) {
			case *dfa.RangesEdge:
				if x.Invert {
					panic("internal error 005")
				}

				for _, v := range x.Ranges.R16 {
					for c := v.Lo; c <= v.Hi; c += v.Stride {
						if c >= classNext {
							continue
						}

						if next[c] != nil {
							panic("internal error 006")
						}

						next[c] = x.Targ
					}
				}
				for _, v := range x.Ranges.R32 {
					for c := v.Lo; c <= v.Hi; c += v.Stride {
						if c >= classNext {
							continue
						}

						if next[c] != nil {
							panic("internal error 007")
						}

						next[c] = x.Targ
					}
				}
			case *dfa.RuneEdge:
				c := x.Rune
				if c >= classNext {
					continue
				}

				if next[c] != nil {
					panic("internal error 008")
				}

				next[c] = x.Targ
			default:
				panic(fmt.Errorf("internal error 009: %T", x))
			}
		}
		for c, nx := range next {
			iCase := cases
			cases++
			// Extend the prefix with a representative character of
			// class c, then scan the result with both scanners.
			src := prefix
			switch c {
			case classEOF:
				// nop
			case classNonASCII:
				src += "á"
			case classLTLT, classGTGT, classBOM:
				src += "@"
			default:
				src += string(rune(c))
			}

			fi := token.NewFile("", len(src))
			fi2 := fset2.AddFile("", -1, len(src))
			errCnt := 0
			b := []byte(src)
			var errs, errs2 scanner.ErrorList
			l.init(fi, b)
			l.errHandler = func(position token.Position, msg string, args ...interface{}) {
				errCnt++
				errs.Add(gotoken.Position(position), fmt.Sprintf(msg, args...))
			}
			ss.Init(fi2, b, func(pos gotoken.Position, msg string) {
				errs2.Add(pos, msg)
			}, 0)
			sum += len(src)
			// Token-by-token comparison; -N restricts checking to a
			// single case index.
			for i := 0; nerr <= 10; i++ {
				errs = nil
				errs2 = nil
				l.errorCount = 0
				ss.ErrorCount = 0
				off, tok := l.Scan()
				pos := l.position(off)
				lit := string(l.lit)
				p2, tok2, lit2 := ss.Scan()
				pos2 := fi2.Position(p2)
				if n := *oN; n < 0 || n == iCase {
					if g, e := l.errorCount != 0, ss.ErrorCount != 0; g != e {
						nerr++
						t.Errorf(
							"%6d: errors[%d] %q(|% x|), %v %v\nall got\n%s\nall exp\n%s",
							iCase, i, src, src, l.errorCount, ss.ErrorCount, errString(errs), errString(errs2),
						)
					}
					if g, e := pos.Filename, pos2.Filename; g != e {
						nerr++
						t.Errorf("%6d: pos.Filename[%d] %q(|% x|), %v %v (%v %v)", iCase, i, src, src, g, e, pos, pos2)
					}
					if g, e := pos.Line, pos2.Line; g != e {
						nerr++
						t.Errorf("%6d: pos.Line[%d] %q(|% x|), %v %v (%v %v)", iCase, i, src, src, g, e, pos, pos2)
					}
					if g, e := pos.Column, pos2.Column; g != e {
						nerr++
						t.Errorf("%6d: pos.Column[%d] %q(|% x|), %v %v (%v %v)", iCase, i, src, src, g, e, pos, pos2)
					}
					if g, e := pos.Offset, pos2.Offset; g != e {
						nerr++
						t.Errorf("%6d: pos.Offset[%d] %q(|% x|), %v %v (%v %v)", iCase, i, src, src, g, e, pos, pos2)
					}
					if g, e := tok, tok2; g != e {
						// Whitelist cases like "f..3".
						if g == gotoken.PERIOD && e == gotoken.ILLEGAL && strings.Contains(src, "..") {
							break
						}

						nerr++
						t.Errorf("%6d: tok[%d] %q(|% x|) %s %s", iCase, i, src, src, g, e)

					}
					if l.errorCount+ss.ErrorCount != 0 || tok == gotoken.ILLEGAL {
						continue
					}

					// go/scanner returns "" literals for operator tokens;
					// normalize to the token's canonical spelling.
					if lit2 == "" && tok2 != gotoken.EOF {
						lit2 = tok2.String()
					}
					if g, e := lit, lit2; g != e {
						nerr++
						t.Errorf("%6d: lit[%d] %q(|% x|), %q(|% x|) %q(|% x|)", iCase, i, src, src, g, g, e, e)
					}
					if nerr >= 10 {
						return
					}
				}
				if tok == gotoken.EOF || tok2 == gotoken.EOF {
					break
				}
			}
			if c == classEOF || nx == nil {
				continue
			}

			// Recurse into the successor state with the extended prefix.
			f(src, nx)
		}
		mn--
	}
	f("", lexL.Dfa[0])
	if mn != 0 {
		t.Errorf("states covered: %d/%d", mn0-mn, mn0)
	} else {
		t.Logf("states covered: %d/%d", mn0-mn, mn0)
	}
	t.Logf("test cases %v, total src len %v", cases, sum)
}
   599  
// testScannerBugs feeds the lexer a regression table of tricky inputs —
// semicolon insertion, comments, invalid UTF-8 bytes, the «/» template
// tokens — and checks offset, position string, token kind and literal for
// every expected token, then verifies the trailing EOF and its offset.
// The -N flag restricts the run to one table entry; the test aborts after
// ten mismatches.
func testScannerBugs(t *testing.T) {
	// toks is the expected token stream for one source string.
	type toks []struct {
		off int    // byte offset of the token
		pos string // "line:column" rendering of its position
		tok gotoken.Token
		lit string // expected literal text
	}
	l := newLexer(nil, nil)
	n := *oN
	nerr := 0
	cases := 0
	for i, v := range []struct {
		src  string
		toks toks
	}{
		{" (", toks{{1, "1:2", gotoken.LPAREN, "("}}},
		{" (\n", toks{{1, "1:2", gotoken.LPAREN, "("}}},
		{" z ", toks{{1, "1:2", gotoken.IDENT, "z"}, {3, "1:4", gotoken.SEMICOLON, "\n"}}},
		{" z", toks{{1, "1:2", gotoken.IDENT, "z"}, {2, "1:3", gotoken.SEMICOLON, "\n"}}},
		{" za ", toks{{1, "1:2", gotoken.IDENT, "za"}, {4, "1:5", gotoken.SEMICOLON, "\n"}}},
		{" za", toks{{1, "1:2", gotoken.IDENT, "za"}, {3, "1:4", gotoken.SEMICOLON, "\n"}}},
		{" « ", toks{{1, "1:2", tokenLTLT, "«"}}},
		{" » ", toks{{1, "1:2", tokenGTGT, "»"}, {4, "1:5", gotoken.SEMICOLON, "\n"}}},
		{"", nil},
		{"'\\U00000000'!", toks{{0, "1:1", gotoken.CHAR, "'\\U00000000'"}, {12, "1:13", gotoken.NOT, "!"}}},
		{"'\\U00000000'", toks{{0, "1:1", gotoken.CHAR, "'\\U00000000'"}, {12, "1:13", gotoken.SEMICOLON, "\n"}}},
		{"'\\u0000'!", toks{{0, "1:1", gotoken.CHAR, "'\\u0000'"}, {8, "1:9", gotoken.NOT, "!"}}},
		{"'\\u0000'", toks{{0, "1:1", gotoken.CHAR, "'\\u0000'"}, {8, "1:9", gotoken.SEMICOLON, "\n"}}},
		{"'\\x00'!", toks{{0, "1:1", gotoken.CHAR, "'\\x00'"}, {6, "1:7", gotoken.NOT, "!"}}},
		{"'\\x00'", toks{{0, "1:1", gotoken.CHAR, "'\\x00'"}, {6, "1:7", gotoken.SEMICOLON, "\n"}}},
		{"'foo';", toks{{0, "1:1", gotoken.CHAR, "'foo'"}, {5, "1:6", gotoken.SEMICOLON, ";"}}},
		{"( ", toks{{0, "1:1", gotoken.LPAREN, "("}}},
		{"(", toks{{0, "1:1", gotoken.LPAREN, "("}}},
		{".", toks{{0, "1:1", gotoken.PERIOD, "."}}},
		{"..3", toks{{0, "1:1", gotoken.PERIOD, "."}, {1, "1:2", gotoken.FLOAT, ".3"}, {3, "1:4", gotoken.SEMICOLON, "\n"}}},
		{"/***/func", toks{{5, "1:6", gotoken.FUNC, "func"}}},
		{"/**/func", toks{{4, "1:5", gotoken.FUNC, "func"}}},
		{"/*\n */\nfunc ", toks{{7, "3:1", gotoken.FUNC, "func"}}},
		{"/*\n *\n */\nfunc ", toks{{10, "4:1", gotoken.FUNC, "func"}}},
		{"/*\n*/\nfunc ", toks{{6, "3:1", gotoken.FUNC, "func"}}},
		{"/*\n\n*/\nfunc ", toks{{7, "4:1", gotoken.FUNC, "func"}}},
		{"//", nil},
		{"//\n", nil},
		{"//\n//", nil},
		{"//\n//\n", nil},
		{"//\n//\n@", toks{{6, "3:1", gotoken.ILLEGAL, "@"}}},
		{"//\n//\nz", toks{{6, "3:1", gotoken.IDENT, "z"}, {7, "3:2", gotoken.SEMICOLON, "\n"}}},
		{"//\n//\nz1", toks{{6, "3:1", gotoken.IDENT, "z1"}, {8, "3:3", gotoken.SEMICOLON, "\n"}}},
		{"//\n@", toks{{3, "2:1", gotoken.ILLEGAL, "@"}}},
		{"//\nz", toks{{3, "2:1", gotoken.IDENT, "z"}, {4, "2:2", gotoken.SEMICOLON, "\n"}}},
		{"//\nz1", toks{{3, "2:1", gotoken.IDENT, "z1"}, {5, "2:3", gotoken.SEMICOLON, "\n"}}},
		{";\xf0;", toks{{0, "1:1", gotoken.SEMICOLON, ";"}, {1, "1:2", gotoken.IDENT, "\xf0"}, {2, "1:3", gotoken.SEMICOLON, ";"}}},
		{"<de..f0", toks{{0, "1:1", gotoken.LSS, "<"}, {1, "1:2", gotoken.IDENT, "de"}, {3, "1:4", gotoken.PERIOD, "."}, {4, "1:5", gotoken.PERIOD, "."}, {5, "1:6", gotoken.IDENT, "f0"}, {7, "1:8", gotoken.SEMICOLON, "\n"}}},
		{"<dt..e?", toks{{0, "1:1", gotoken.LSS, "<"}, {1, "1:2", gotoken.IDENT, "dt"}, {3, "1:4", gotoken.PERIOD, "."}, {4, "1:5", gotoken.PERIOD, "."}, {5, "1:6", gotoken.IDENT, "e"}, {6, "1:7", gotoken.ILLEGAL, "?"}, {7, "1:8", gotoken.SEMICOLON, "\n"}}},
		{"\"\\U00000000\"!", toks{{0, "1:1", gotoken.STRING, "\"\\U00000000\""}, {12, "1:13", gotoken.NOT, "!"}}},
		{"\"\\U00000000\"", toks{{0, "1:1", gotoken.STRING, "\"\\U00000000\""}, {12, "1:13", gotoken.SEMICOLON, "\n"}}},
		{"\"\\u0000\"!", toks{{0, "1:1", gotoken.STRING, "\"\\u0000\""}, {8, "1:9", gotoken.NOT, "!"}}},
		{"\"\\u0000\"", toks{{0, "1:1", gotoken.STRING, "\"\\u0000\""}, {8, "1:9", gotoken.SEMICOLON, "\n"}}},
		{"\"\\x00\"!", toks{{0, "1:1", gotoken.STRING, "\"\\x00\""}, {6, "1:7", gotoken.NOT, "!"}}},
		{"\"\\x00\"", toks{{0, "1:1", gotoken.STRING, "\"\\x00\""}, {6, "1:7", gotoken.SEMICOLON, "\n"}}},
		{"\xf0", toks{{0, "1:1", gotoken.IDENT, "\xf0"}, {1, "1:2", gotoken.SEMICOLON, "\n"}}},
		{"\xf0;", toks{{0, "1:1", gotoken.IDENT, "\xf0"}, {1, "1:2", gotoken.SEMICOLON, ";"}}},
		{"a @= b", toks{{0, "1:1", gotoken.IDENT, "a"}, {2, "1:3", gotoken.ILLEGAL, "@"}, {3, "1:4", gotoken.ASSIGN, "="}, {5, "1:6", gotoken.IDENT, "b"}, {6, "1:7", gotoken.SEMICOLON, "\n"}}},
		{"a/**/", toks{{0, "1:1", gotoken.IDENT, "a"}, {1, "1:2", gotoken.SEMICOLON, "\n"}}},
		{"a/**//**/", toks{{0, "1:1", gotoken.IDENT, "a"}, {1, "1:2", gotoken.SEMICOLON, "\n"}}},
		{"a/*\n*/", toks{{0, "1:1", gotoken.IDENT, "a"}, {1, "1:2", gotoken.SEMICOLON, "\n"}}},
		{"a/*\n*//**/", toks{{0, "1:1", gotoken.IDENT, "a"}, {1, "1:2", gotoken.SEMICOLON, "\n"}}},
		{"a//", toks{{0, "1:1", gotoken.IDENT, "a"}, {1, "1:2", gotoken.SEMICOLON, "\n"}}},
		{"a//\n", toks{{0, "1:1", gotoken.IDENT, "a"}, {1, "1:2", gotoken.SEMICOLON, "\n"}}},
		{"a«z", toks{{0, "1:1", gotoken.IDENT, "a"}, {1, "1:2", tokenLTLT, "«"}, {3, "1:4", gotoken.IDENT, "z"}, {4, "1:5", gotoken.SEMICOLON, "\n"}}},
		{"a»z", toks{{0, "1:1", gotoken.IDENT, "a"}, {1, "1:2", tokenGTGT, "»"}, {3, "1:4", gotoken.IDENT, "z"}, {4, "1:5", gotoken.SEMICOLON, "\n"}}},
		{"d/*\\\n*/0", toks{{0, "1:1", gotoken.IDENT, "d"}, {1, "1:2", gotoken.SEMICOLON, "\n"}, {7, "2:3", gotoken.INT, "0"}, {8, "2:4", gotoken.SEMICOLON, "\n"}}},
		{"import ( ", toks{{0, "1:1", gotoken.IMPORT, "import"}, {7, "1:8", gotoken.LPAREN, "("}}},
		{"import (", toks{{0, "1:1", gotoken.IMPORT, "import"}, {7, "1:8", gotoken.LPAREN, "("}}},
		{"import (\n", toks{{0, "1:1", gotoken.IMPORT, "import"}, {7, "1:8", gotoken.LPAREN, "("}}},
		{"import (\n\t", toks{{0, "1:1", gotoken.IMPORT, "import"}, {7, "1:8", gotoken.LPAREN, "("}}},
		{"z ", toks{{0, "1:1", gotoken.IDENT, "z"}, {2, "1:3", gotoken.SEMICOLON, "\n"}}},
		{"z w", toks{{0, "1:1", gotoken.IDENT, "z"}, {2, "1:3", gotoken.IDENT, "w"}, {3, "1:4", gotoken.SEMICOLON, "\n"}}},
		{"z", toks{{0, "1:1", gotoken.IDENT, "z"}, {1, "1:2", gotoken.SEMICOLON, "\n"}}},
		{"za ", toks{{0, "1:1", gotoken.IDENT, "za"}, {3, "1:4", gotoken.SEMICOLON, "\n"}}},
		{"za wa", toks{{0, "1:1", gotoken.IDENT, "za"}, {3, "1:4", gotoken.IDENT, "wa"}, {5, "1:6", gotoken.SEMICOLON, "\n"}}},
		{"za", toks{{0, "1:1", gotoken.IDENT, "za"}, {2, "1:3", gotoken.SEMICOLON, "\n"}}},
		{"«", toks{{0, "1:1", tokenLTLT, "«"}}},
		{"«a", toks{{0, "1:1", tokenLTLT, "«"}, {2, "1:3", gotoken.IDENT, "a"}, {3, "1:4", gotoken.SEMICOLON, "\n"}}},
		{"««", toks{{0, "1:1", tokenLTLT, "«"}, {2, "1:3", tokenLTLT, "«"}}},
		{"«»", toks{{0, "1:1", tokenLTLT, "«"}, {2, "1:3", tokenGTGT, "»"}, {4, "1:5", gotoken.SEMICOLON, "\n"}}},
		{"»", toks{{0, "1:1", tokenGTGT, "»"}, {2, "1:3", gotoken.SEMICOLON, "\n"}}},
		{"»a", toks{{0, "1:1", tokenGTGT, "»"}, {2, "1:3", gotoken.IDENT, "a"}, {3, "1:4", gotoken.SEMICOLON, "\n"}}},
		{"»«", toks{{0, "1:1", tokenGTGT, "»"}, {2, "1:3", tokenLTLT, "«"}}},
		{"»»", toks{{0, "1:1", tokenGTGT, "»"}, {2, "1:3", tokenGTGT, "»"}, {4, "1:5", gotoken.SEMICOLON, "\n"}}},
	} {
		// -N selects a single table entry.
		if n >= 0 && i != n {
			continue
		}

		cases++
		src := v.src
		bsrc := []byte(src)
		fi := token.NewFile("", len(src))
		l.init(fi, bsrc)
		for j, v := range v.toks {
			off, tok := l.Scan()
			if g, e := off, v.off; g != e {
				nerr++
				t.Errorf("%v off[%d] %q(|% x|) %v %v", i, j, src, src, g, e)
			}
			if g, e := l.position(off).String(), v.pos; g != e {
				nerr++
				t.Errorf("%v pos[%d] %q(|% x|) %q %q", i, j, src, src, g, e)
			}
			if g, e := tok, v.tok; g != e {
				nerr++
				t.Errorf("%v tok[%d] %q(|% x|) %q %q", i, j, src, src, g, e)
			}
			if g, e := string(l.lit), v.lit; g != e {
				nerr++
				t.Errorf("%v lit[%d] %q(|% x|) %q(|% x|) %q(|% x|)", i, j, src, src, g, g, e, e)
			}
			if nerr >= 10 {
				return
			}
		}
		// After the expected tokens, the lexer must report EOF exactly at
		// the end of the input.
		off, tok := l.Scan()
		if g, e := tok, gotoken.EOF; g != e {
			nerr++
			t.Errorf("%v tok %q(|% x|) %q %q", i, src, src, g, e)
		}
		if g, e := off, len(src); g != e {
			nerr++
			t.Errorf("%v off %q(|% x|) %v %v", i, src, src, g, e)
		}
		if nerr >= 10 {
			return
		}
	}
	t.Logf("test cases: %v", cases)
}
   737  
// testScanner scans every file in paths with both the project lexer and
// go/scanner and reports the first divergence per file in position, error
// reporting, token kind or literal. Known divergences under $GOROOT/test
// and testdata are whitelisted. The lexer's line-directive handling is
// exercised via its comment handler.
func testScanner(t *testing.T, paths []string) {
	fset2 := gotoken.NewFileSet()
	var s scanner.Scanner
	l := newLexer(nil, nil)
	sum := 0
	toks := 0
	files := 0
	grt := filepath.Join(runtime.GOROOT(), "test")
outer:
	for _, path := range paths {
		src, err := ioutil.ReadFile(path)
		if err != nil {
			t.Fatal(err)
		}

		sum += len(src)
		fi := token.NewFile(path, len(src))
		fi2 := fset2.AddFile(path, -1, len(src))
		var se scanner.ErrorList
		l.init(fi, src)
		l.fname = path
		// Mirror line directives (lineDirective is declared elsewhere in
		// this package) into the lexer's file position table. Directives
		// not starting in column 1 are ignored.
		l.commentHandler = func(off int, lit []byte) {
			if bytes.HasPrefix(lit, lineDirective) {
				if l.position(off).Column != 1 {
					return
				}

				lit = bytes.TrimSpace(lit[len(lineDirective):])
				// Expect "filename:line"; reject a missing name, a
				// trailing colon or a non-numeric line part.
				if i := bytes.LastIndexByte(lit, ':'); i > 0 && i < len(lit)-1 {
					fn := lit[:i]
					ln := 0
					for _, c := range lit[i+1:] {
						if c >= '0' && c <= '9' {
							ln = 10*ln + int(c-'0')
							continue
						}

						return
					}
					s := filepath.Clean(string(fn))
					if !filepath.IsAbs(s) {
						s = filepath.Join(filepath.Dir(path), s)
					}
					if l.off != len(l.src) {
						l.fname = s
						l.file.AddLineInfo(l.off-1, s, ln-1)
					}
				}
			}
		}
		l.errHandler = func(position token.Position, msg string, arg ...interface{}) {
			se.Add(gotoken.Position(position), fmt.Sprintf(msg, arg...))
		}
		s.Init(fi2, src, nil, 0)
		files++
		for {
			l.errorCount = 0
			s.ErrorCount = 0

			off, gt := l.Scan()
			// The project lexer reports a BOM token; go/scanner treats it
			// as illegal input.
			if gt == tokenBOM {
				gt = gotoken.ILLEGAL
			}
			toks++
			glit := string(l.lit)
			pos, et, lit := s.Scan()
			position := fi2.Position(pos)
			g := l.position(off)
			if e := token.Position(position); g != e {
				t.Errorf("%s: position mismatch, expected %s", g, e)
				continue outer
			}

			if l.errorCount != 0 && s.ErrorCount == 0 {
				t.Error(errString(se))
				continue outer
			}

			if gt == gotoken.EOF {
				if et != gotoken.EOF {
					t.Errorf("%s: unexpected eof", position)
					continue outer
				}

				break
			}

			if g, e := gt, et; g != e {
				// Whitelist $GOROOT/test/fixedbugs/issue11359.go:11:5: token mismatch "IDENT" "ILLEGAL"
				if gt == gotoken.IDENT && et == gotoken.ILLEGAL && strings.HasPrefix(path, grt) {
					continue outer
				}

				// Whitelist $GOROOT/test/syntax/ddd.go:10:5: token mismatch "." "ILLEGAL"
				if gt == gotoken.PERIOD && et == gotoken.ILLEGAL && strings.HasPrefix(path, grt) {
					continue outer
				}

				// Whitelist testdata/errchk/issue15292/0.go:33:12: token mismatch "token(87)" "ILLEGAL"
				if (gt == tokenLTLT || gt == tokenGTGT) && et == gotoken.ILLEGAL && strings.HasPrefix(path, "testdata") {
					continue outer
				}

				t.Errorf("%s: token mismatch %q %q", position, g, e)
				continue outer
			}

			// go/scanner returns "" literals for operator tokens;
			// normalize to the canonical spelling.
			if lit == "" {
				lit = gt.String()
			}
			if g, e := glit, lit; g != e {
				// Whitelist $GOROOT/test/fixedbugs/bug163.go:10:2: literal mismatch "x⊛y" "x"
				if gt == gotoken.IDENT && strings.HasPrefix(glit, lit) && strings.HasPrefix(path, grt) {
					continue outer
				}

				t.Errorf("%s: literal mismatch %q %q", position, g, e)
				continue outer
			}
		}
	}
	t.Logf("files: %v, toks: %v, bytes %v", files, toks, sum)
}
   861  
// TestScanner runs the scanner suite: DFA state-coverage checks, the bug
// regression table, and token-by-token comparison against go/scanner over
// the GOROOT sources and the errchk corpus.
func TestScanner(t *testing.T) {
	t.Run("States", testScannerStates)
	t.Run("Bugs", testScannerBugs)
	t.Run("GOROOT", func(t *testing.T) { testScanner(t, gorootFiles) })
	t.Run("Errchk", func(t *testing.T) { testScanner(t, errchkFiles) })
}
   868  
// BenchmarkScanner compares the standard library scanner (StdGo) with this
// package's lexer (Std) on the same standard library sources.  Each file is
// scanned in its own goroutine; errors are funneled through a channel sized
// to hold one result per file.
func BenchmarkScanner(b *testing.B) {
	b.Run("StdGo", func(b *testing.B) {
		c := make(chan error, len(stdLibFiles))
		fset := gotoken.NewFileSet()
		b.ResetTimer()
		var sum int32
		for i := 0; i < b.N; i++ {
			sum = 0
			for _, v := range stdLibFiles {
				go func(v string) {
					src, err := ioutil.ReadFile(v)
					if err != nil {
						c <- err
						return
					}

					// Accumulate total input size for b.SetBytes.
					atomic.AddInt32(&sum, int32(len(src)))
					var s scanner.Scanner
					s.Init(fset.AddFile(v, -1, len(src)), src, nil, 0)
					for {
						if _, tok, _ := s.Scan(); tok == gotoken.EOF {
							break
						}
					}
					c <- nil
				}(v)
			}
			// Wait for all scanning goroutines of this iteration.
			for range stdLibFiles {
				if err := <-c; err != nil {
					b.Fatal(err)
				}
			}
		}
		b.SetBytes(int64(sum))
	})

	b.Run("Std", func(b *testing.B) {
		c := make(chan error, len(stdLibFiles))
		b.ResetTimer()
		var sum int32
		for i := 0; i < b.N; i++ {
			sum = 0
			for _, v := range stdLibFiles {
				go func(v string) {
					f0, err := os.Open(v)
					if err != nil {
						c <- err
						return
					}

					defer f0.Close()

					// Memory-map the input instead of reading it; the lexer
					// operates directly on the mapped byte slice.
					src, err := mmap.Map(f0, 0, 0)
					if err != nil {
						c <- err
						return
					}

					defer src.Unmap()

					atomic.AddInt32(&sum, int32(len(src)))
					sf := &SourceFile{File: token.NewFile(v, len(src))}
					l := newLexer(sf, src)
					for {
						if _, tok := l.Scan(); tok == gotoken.EOF {
							break
						}
					}
					c <- nil
				}(v)
			}
			for range stdLibFiles {
				if err := <-c; err != nil {
					b.Fatal(err)
				}
			}
		}
		b.SetBytes(int64(sum))
	})
}
   949  
// BenchmarkParser compares the standard library parser (StdGo) with this
// package's parser (Std) on the standard library.  NOTE(review): sum is set
// only by the StdGo sub-benchmark; Std reuses that value for b.SetBytes, so
// its B/s figure is meaningful only when StdGo ran first.
func BenchmarkParser(b *testing.B) {
	var sum int32
	b.Run("StdGo", func(b *testing.B) {
		c := make(chan error, len(stdLibFiles))
		fset := gotoken.NewFileSet()
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			sum = 0
			for _, v := range stdLibFiles {
				go func(v string) {
					src, err := ioutil.ReadFile(v)
					if err != nil {
						c <- err
						return
					}

					atomic.AddInt32(&sum, int32(len(src)))
					_, err = goparser.ParseFile(fset, v, src, 0)
					c <- err
				}(v)
			}
			// Wait for all parse goroutines of this iteration.
			for range stdLibFiles {
				if err := <-c; err != nil {
					b.Fatal(err)
				}
			}
		}
		b.SetBytes(int64(sum))
	})

	b.Run("Std", func(b *testing.B) {
		a := make([]*Package, len(stdLibPackages))
		errorList := newErrorList(1, false)
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			// Context creation is setup, not parsing; keep it off the clock.
			b.StopTimer()
			ctx, err := newTestContext()
			if err != nil {
				b.Fatal(err)
			}

			ctx.tweaks.noChecks = true
			b.StartTimer()
			for i, v := range stdLibPackages { //TODO parallel
				a[i] = ctx.load(token.Position{}, v.ImportPath, nil, errorList)
			}
			for _, v := range a {
				v.waitFor()
			}
			if err := errorList.error(); err != nil {
				b.Fatal(err)
			}
		}
		// sum, if non-zero, was computed by the StdGo sub-benchmark above.
		if sum != 0 {
			b.SetBytes(int64(sum))
		}
	})
}
  1008  
// BenchmarkChecker compares go/types type checking (StdGo) with this
// package's checker (Std) on the standard library.  As in BenchmarkParser,
// sum is computed only by the StdGo sub-benchmark and reused by Std for
// b.SetBytes.
func BenchmarkChecker(b *testing.B) {
	var sum int
	b.Run("StdGo", func(b *testing.B) {
		type item struct {
			importPath string
			files      []string
		}

		fset := gotoken.NewFileSet()
		conf := types.Config{Importer: importer.Default()}
		conf.FakeImportC = true

		// Collect the per-package file lists up front; package "builtin" is
		// documentation-only and cannot be type checked.
		var list []item
		for _, v := range stdLibPackages {
			if v.ImportPath == "builtin" {
				continue
			}

			item := item{importPath: v.ImportPath}
			for _, w := range v.SourceFiles {
				item.files = append(item.files, w.Path)
			}
			list = append(list, item)
		}

		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			for _, item := range list {
				var files []*ast.File
				for _, fn := range item.files {
					f, err := goparser.ParseFile(fset, fn, nil, 0)
					if err != nil {
						b.Fatal(err)
					}

					files = append(files, f)
				}
				conf.Check(item.importPath, fset, files, nil)
			}
			// Compute the total input size once, off the clock.
			if sum == 0 {
				b.StopTimer()
				fset.Iterate(func(f *gotoken.File) bool { sum += f.Size(); return true })
				b.StartTimer()
			}
		}
		if sum != 0 {
			b.SetBytes(int64(sum))
		}
	})

	b.Run("Std", func(b *testing.B) {
		a := make([]*Package, len(stdLibPackages))
		errorList := newErrorList(1, false)
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			// Context creation is setup, not checking; keep it off the clock.
			b.StopTimer()
			ctx, err := newTestContext()
			if err != nil {
				b.Fatal(err)
			}

			b.StartTimer()
			for i, v := range stdLibPackages {
				a[i] = ctx.load(token.Position{}, v.ImportPath, nil, errorList)
			}
			for _, v := range a {
				v.waitFor()
			}
			if err := errorList.error(); err != nil {
				b.Fatal(err)
			}
		}
		if sum != 0 {
			b.SetBytes(int64(sum))
		}
	})
}
  1086  
// ylex wraps the package lexer with the extra state needed to feed the
// yacc-generated reference grammar; in particular it performs the
// "loophack" rewrite of '{' to tokenBODY after for/if/select/switch
// headers (see lex and fixLbr).
type ylex struct {
	*lexer
	lbrace        int    // current '{' nesting depth
	lbraceRule    int    // rule number of the "lbrace" production, set by newYlex
	lbraceStack   []int  // brace depths at which loophack must be re-armed
	loophack      bool   // next '{' opens a for/if/select/switch body
	loophackStack []bool // loophack values saved across '(' and '['
	p             *yparser
	pos           token.Position // position of the most recently scanned token
	tok           gotoken.Token  // most recently returned token
}
  1098  
// init (re)initializes the embedded lexer for src and resets all loophack
// bookkeeping, allowing the ylex to be reused across files.
func (l *ylex) init(file *token.File, src []byte) {
	l.lexer.init(file, src)
	l.lbrace = 0
	l.lbraceStack = l.lbraceStack[:0] // keep capacity
	l.loophack = false
	l.loophackStack = l.loophackStack[:0]
}
  1106  
  1107  func newYlex(l *lexer, p *yparser) *ylex {
  1108  	yl := &ylex{lexer: l, p: p}
  1109  	for _, v := range p.Rules {
  1110  		if v.Sym.Name == "lbrace" {
  1111  			yl.lbraceRule = v.RuleNum
  1112  			break
  1113  		}
  1114  	}
  1115  	return yl
  1116  }
  1117  
// lex returns the position and grammar symbol of the next token, applying
// the "loophack" transformation: a '{' that directly follows a
// for/if/select/switch header (outside parentheses/brackets) is rewritten
// to tokenBODY so the grammar can tell statement blocks from composite
// literals.  It panics on tokens with no grammar symbol.
func (l *ylex) lex() (token.Position, *y.Symbol) {
	off, tok := l.Scan()
	l.pos = l.position(off)
	sym, ok := l.p.tok2sym[tok]
	if !ok {
		panic(fmt.Sprintf("%s: missing symbol for token %q", l.pos, tok))
	}

	switch tok {
	case gotoken.FOR, gotoken.IF, gotoken.SELECT, gotoken.SWITCH:
		// The next unparenthesized '{' opens the statement body.
		l.loophack = true
	case gotoken.LPAREN, gotoken.LBRACK:
		// Parenthesized subexpressions suspend the hack; save its state.
		if l.loophack || len(l.loophackStack) != 0 {
			l.loophackStack = append(l.loophackStack, l.loophack)
			l.loophack = false
		}
	case gotoken.RPAREN, gotoken.RBRACK:
		// Restore the hack state saved at the matching opener.
		if n := len(l.loophackStack); n != 0 {
			l.loophack = l.loophackStack[n-1]
			l.loophackStack = l.loophackStack[:n-1]
		}
	case gotoken.LBRACE:
		l.lbrace++
		if l.loophack {
			tok = tokenBODY
			sym = l.p.tok2sym[tok]
			l.loophack = false
		}
	case gotoken.RBRACE:
		l.lbrace--
		// Leaving the brace depth recorded by fixLbr re-arms the hack.
		if n := len(l.lbraceStack); n != 0 && l.lbraceStack[n-1] == l.lbrace {
			l.lbraceStack = l.lbraceStack[:n-1]
			l.loophack = true
		}
	}
	l.tok = tok
	return l.pos, sym
}
  1156  
// fixLbr is invoked when the grammar reduces the "lbrace" production.  It
// either re-arms the loophack immediately (lookahead is '}') or records the
// brace depth at which the RBRACE case of lex must re-arm it later.
func (l *ylex) fixLbr() {
	n := l.lbrace - 1
	switch l.tok {
	case gotoken.RBRACE:
		l.loophack = true
		return
	case gotoken.LBRACE:
		// The lookahead '{' has already been counted; compensate.
		n--
	}

	l.lbraceStack = append(l.lbraceStack, n)
}
  1169  
// yparser is a table-driven LALR automaton sharing the global yp0 tables.
// The reduce and trace hooks, when non-nil, observe reductions and state
// transitions respectively.
type yparser struct {
	*yParser
	reduce func(int)   // called with the rule number of every reduction
	trace  func(int)   // called with every state entered
	yyS    []int       // state stack
	yySyms []*y.Symbol // symbol stack, parallel to yyS
	yychar *y.Symbol   // lookahead symbol; nil when a new token is needed
}
  1178  
  1179  func newYParser(reduce, trace func(int)) *yparser {
  1180  	return &yparser{
  1181  		yParser: yp0,
  1182  		reduce:  reduce,
  1183  		trace:   trace,
  1184  	}
  1185  }
  1186  
// parse runs the LALR automaton from state 0, calling lex for lookahead
// symbols, until the grammar accepts the input ('a'), or no action exists
// for the lookahead, in which case a descriptive error is returned.
func (p *yparser) parse(lex func(int) *y.Symbol) error {
	yystate := 0
	p.yyS = p.yyS[:0]
	p.yySyms = p.yySyms[:0]
	p.yychar = nil
	for {
		p.yyS = append(p.yyS, yystate)
		if p.trace != nil {
			p.trace(yystate)
		}
		if p.yychar == nil {
			p.yychar = lex(yystate)
		}
		switch typ, arg := p.action(yystate, p.yychar).Kind(); typ {
		case 'a': // accept
			return nil
		case 's': // shift: consume the lookahead, go to state arg
			p.yySyms = append(p.yySyms, p.yychar)
			p.yychar = nil
			yystate = arg
		case 'r': // reduce by rule arg
			rule := p.Rules[arg]
			if p.reduce != nil {
				p.reduce(rule.RuleNum)
			}
			// Pop the rule's components off both stacks, push the rule's
			// LHS symbol and goto the state selected by the exposed top.
			n := len(p.yyS)
			m := len(rule.Components)
			p.yyS = p.yyS[:n-m]
			p.yySyms = append(p.yySyms[:n-m-1], rule.Sym)
			n -= m
			if p.trace != nil {
				p.trace(p.yyS[n-1])
			}
			_, yystate = p.action(p.yyS[n-1], rule.Sym).Kind()
		default:
			return p.fail(yystate)
		}
	}
}
  1226  
  1227  func (p *yparser) fail(yystate int) error {
  1228  	var a []string
  1229  	for _, v := range p.Table[yystate] {
  1230  		nm := v.Sym.Name
  1231  		if nm == "$end" {
  1232  			nm = "EOF"
  1233  		}
  1234  		if l := v.Sym.LiteralString; l != "" {
  1235  			nm = l
  1236  		}
  1237  		a = append(a, nm)
  1238  	}
  1239  	sort.Strings(a)
  1240  	return fmt.Errorf("no action for %s in state %d, follow set: [%v]", p.yychar, yystate, strings.Join(a, ", "))
  1241  }
  1242  
  1243  func (p *yparser) report(states []int) string {
  1244  	var b bytes.Buffer
  1245  	for _, state := range states {
  1246  		b.Write(p.reports[state])
  1247  		b.WriteString("----\n")
  1248  	}
  1249  	return b.String()
  1250  }
  1251  
// sym2str maps a grammar symbol to a minimal source fragment that produces
// it, used by testParserRejectFS to synthesize inputs.  "@" is returned for
// symbols with no printable form; "" for $end.
func (p *yparser) sym2str(sym *y.Symbol) string {
	nm := sym.Name
	if nm == "$end" {
		return ""
	}

	// 'x'-style single-character literal: strip the quotes.
	if nm[0] == '\'' {
		return nm[1:2]
	}

	// Upper-case names are token kinds: prefer the grammar's quoted literal
	// string, falling back on a canned sample for the gotoken value.
	if nm[0] >= 'A' && nm[0] <= 'Z' {
		if s := sym.LiteralString; s != "" {
			return s[1 : len(s)-1]
		}

		if s := p.tok2str(str2token[nm]); s != "" {
			return s
		}
	}

	return "@"
}
  1274  
// tok2str returns a minimal source fragment that scans to tok, "" for
// tokens with no usable source form here (comments, EOF, and the internal
// tokenNL/tokenLTLT/tokenGTGT tokens), and "@" for ILLEGAL.
func (*yparser) tok2str(tok gotoken.Token) string {
	switch tok {
	case gotoken.ILLEGAL:
		return "@"
	case gotoken.COMMENT, gotoken.EOF, tokenNL, tokenLTLT, tokenGTGT:
		return ""
	case gotoken.IDENT:
		return "a"
	case gotoken.INT:
		return "1"
	case gotoken.FLOAT:
		return "2.3"
	case gotoken.IMAG:
		return "4i"
	case gotoken.CHAR:
		return "'b'"
	case gotoken.STRING:
		return `"c"`
	case tokenBODY:
		return "{"
	default:
		// Operators and keywords print as themselves.
		return tok.String()
	}
}
  1299  
  1300  func (p *yparser) newCover() map[int]struct{} {
  1301  	r := make(map[int]struct{}, len(p.States))
  1302  	for i := range p.States {
  1303  		r[i] = struct{}{}
  1304  	}
  1305  	return r
  1306  }
  1307  
  1308  func (p *yparser) followList(state int) (r []*y.Symbol) {
  1309  	for _, v := range p.Table[state] {
  1310  		if v.Sym.IsTerminal {
  1311  			r = append(r, v.Sym)
  1312  		}
  1313  	}
  1314  	return r
  1315  }
  1316  
  1317  func (p *yparser) followSet(state int) map[*y.Symbol]struct{} {
  1318  	l := p.followList(state)
  1319  	m := make(map[*y.Symbol]struct{}, len(l))
  1320  	for _, v := range l {
  1321  		m[v] = struct{}{}
  1322  	}
  1323  	return m
  1324  }
  1325  
  1326  func (p *yparser) action(state int, sym *y.Symbol) *y.Action {
  1327  	for _, v := range p.Table[state] {
  1328  		if v.Sym == sym {
  1329  			return &v
  1330  		}
  1331  	}
  1332  	return nil
  1333  }
  1334  
// testParserYacc parses files with the yacc-generated reference grammar and
// verifies that every automaton state is visited at least once: the trace
// hook deletes visited states from cover, which starts out holding all of
// them.
func testParserYacc(t *testing.T, files []string) {
	var cover map[int]struct{}
	var yl *ylex
	yp := newYParser(
		func(rule int) {
			// Reducing the "lbrace" production needs the loophack fixup.
			if rule == yl.lbraceRule {
				yl.fixLbr()
			}
		},
		func(state int) { delete(cover, state) },
	)
	cover = yp.newCover()
	cn0 := len(cover)
	l := newLexer(nil, nil)
	yl = newYlex(l, yp)
	sum := 0
	toks := 0
	nfiles := 0
	for _, path := range files {
		src, err := ioutil.ReadFile(path)
		if err != nil {
			t.Fatal(err)
		}

		nfiles++
		sum += len(src)
		yl.init(token.NewFile(path, len(src)), src)
		var pos token.Position
		if err = yp.parse(
			func(int) (s *y.Symbol) {
				pos, s = yl.lex()
				toks++
				return s
			},
		); err != nil {
			t.Fatalf("%s: %v", pos, err)
		}
	}
	if cn := len(cover); cn != 0 {
		t.Errorf("states covered: %d/%d", cn0-cn, cn0)
	} else {
		t.Logf("states covered: %d/%d", cn0-cn, cn0)
	}
	t.Logf("files: %v, toks: %v, bytes %v", nfiles, toks, sum)
	// Report the lowest-numbered unvisited state, if any.
	e := -1
	for s := range cover {
		if e < 0 || e > s {
			e = s
		}
	}
	if e >= 0 {
		t.Errorf("states %v, unused %v, first unused state %v", len(yp.States), len(cover), e)
	}
}
  1389  
// fail re-runs the yacc-based reference parser over p's source up to the
// failure offset p.off and returns a report of the last (up to 12)
// automaton states entered, used as a diagnostic for unexpected syntax
// errors.  NOTE(review): nm is unused in this body.
func (p *parser) fail(nm string) string {
	var yl *ylex
	var states []int
	yp := newYParser(
		func(rule int) {
			if rule == yl.lbraceRule {
				yl.fixLbr()
			}
		},
		func(st int) { states = append(states, st) },
	)
	yl = newYlex(newLexer(nil, nil), yp)
	yl.init(token.NewFile("", len(p.l.src)), p.l.src)
	yp.parse(
		func(st int) *y.Symbol {
			// Feed real tokens up to the failure offset, then force EOF.
			if pos, s := yl.lex(); pos.Offset <= p.off {
				return s
			}

			return yp.Syms["$end"]
		},
	)
	return yp.report(states[mathutil.Max(0, len(states)-12):])
}
  1414  
  1415  func (p *parser) todo() {
  1416  	_, fn, fl, _ := runtime.Caller(1)
  1417  	p.err("%q=%q: TODO %v:%v", p.c, p.l.lit, fn, fl) //TODOOK
  1418  }
  1419  
// newTestContext returns a Context mirroring the host toolchain: the search
// path is GOROOT/src followed by every GOPATH entry's src directory, with
// the default release tags plus "cgo" unless CGO_ENABLED=0.
func newTestContext(opt ...Option) (*Context, error) {
	a := strings.Split(strutil.Gopath(), string(os.PathListSeparator))
	for i, v := range a {
		a[i] = filepath.Join(v, "src")
	}
	tags := build.Default.ReleaseTags
	if os.Getenv("CGO_ENABLED") != "0" {
		// The three-index slice caps tags at its length, forcing append to
		// copy and leaving build.Default.ReleaseTags unmodified.
		tags = append(tags[:len(tags):len(tags)], "cgo")
	}
	return NewContext(runtime.GOOS, runtime.GOARCH, tags, append([]string{filepath.Join(runtime.GOROOT(), "src")}, a...), opt...)
}
  1431  
  1432  func testParser(t *testing.T, packages []*Package) {
  1433  	ctx, err := newTestContext()
  1434  	if err != nil {
  1435  		t.Fatal(err)
  1436  		return
  1437  	}
  1438  
  1439  	for _, v := range packages {
  1440  		errorList := newErrorList(0, false)
  1441  		ctx.tweaks.noChecks = strings.Contains(v.ImportPath, filepath.FromSlash("/testdata/parser"))
  1442  		ctx.load(
  1443  			token.Position{},
  1444  			v.ImportPath,
  1445  			func(p *parser) {
  1446  				p.err("syntax error\n----\n%s", p.fail(p.sourceFile.Path))
  1447  			},
  1448  			errorList,
  1449  		).waitFor()
  1450  		if err := errorList.error(); err != nil {
  1451  			s := strings.TrimSpace(err.Error())
  1452  			a := strings.Split(s, "\n")
  1453  			if len(a) == 3 && strings.Contains(s, `cannot find package "github.com/google/pprof/third_party/svg" in any of:`) {
  1454  				continue
  1455  			}
  1456  
  1457  			t.Fatal(s)
  1458  		}
  1459  	}
  1460  	t.Logf("packages: %v", len(packages))
  1461  }
  1462  
// testParserRejectFS checks the parser rejects what the grammar rejects:
// for every automaton state it synthesizes a token prefix reaching that
// state, appends a terminal NOT in the state's follow set, and verifies the
// parser reports an error at (or after) the offending token.
func testParserRejectFS(t *testing.T) {
	ctx, err := newTestContext()
	if err != nil {
		t.Fatal(err)
	}

	ctx.tweaks.ignoreImports = true
	ctx.tweaks.ignoreRedeclarations = true
	yp := newYParser(nil, nil)
	l := newLexer(nil, nil)
	cases := 0
	for state, s := range yp0.States {
		// Build a source prefix that drives the automaton into state.
		syms, _ := s.Syms0()
		var a []string
		for _, sym := range syms {
			a = append(a, yp.sym2str(sym))
		}
		s0 := strings.Join(a, " ") + " "
		fs := yp.followSet(state)
		for _, sym := range yp.Syms {
			if !sym.IsTerminal {
				continue
			}

			if _, ok := fs[sym]; ok {
				continue
			}

			// Append a terminal the grammar must reject in this state.
			s := s0 + yp.sym2str(sym) + "@"
			//dbg("%s\n", s)
			l.init(token.NewFile("", len(s)), []byte(s))
			pkg := newPackage(ctx, "", "", newErrorList(-1, false))
			sf := newSourceFile(pkg, "", nil, nil)
			p := newParser(sf, l)
			off := -1
			// Record only the first error offset.
			p.syntaxError = func(*parser) {
				if off < 0 {
					off = p.off
				}
			}
			cases++
			p.file()
			if off < 0 {
				t.Fatalf(`%d: "%s" unexpected success, final sym %q`, state, s, sym)
			}

			if g, e := off, len(s0); g < e {
				t.Fatalf(`Follow set %v
state %3d: %s unexpected error position, got %v expected %v
           %s^`, yp.followList(state), state, s, g+1, e+1, strings.Repeat("-", g))
			}
		}
	}
	t.Logf("test cases: %v", cases)
}
  1518  
  1519  func quoteErrMessage(s string) string {
  1520  	return strings.Replace(strings.Replace(s, "\t", "\\t", -1), "\n", "\\n", -1)
  1521  }
  1522  
// errchk is a single errorcheck expectation: the regexp text taken from an
// ERROR comment and the position of that comment.
type errchk struct {
	re  []byte         // expectation text; may hold several quoted regexps
	pos token.Position // position of the ERROR comment
}
  1527  
// errchks collects the errorcheck expectations found in one source file.
type errchks []errchk
  1529  
// comment records an errorcheck expectation found in comment s at position.
// After stripping general-comment delimiters it looks for the last
// errCheckMark1, falling back on errCheckMark2; a disable mark occurring
// before the check mark suppresses the expectation.  The text after the
// mark is kept as the expectation regexp.
func (e *errchks) comment(position token.Position, s []byte) {
	s = bytes.TrimPrefix(s, generalCommentStart)
	s = bytes.TrimSuffix(s, generalCommentEnd)
	s = bytes.TrimSpace(s)
	n := len(errCheckMark1)
	i := bytes.LastIndex(s, errCheckMark1)
	j := bytes.LastIndex(s, errCheckDisbled)
	if i < 0 {
		i = bytes.LastIndex(s, errCheckMark2)
		n = len(errCheckMark2)
		if i < 0 {
			return // No check found.
		}
	}

	if j >= 0 && j < i {
		return // Check disabled.
	}

	s = s[i+n:]
	s = bytes.TrimSpace(s)
	*e = append(*e, errchk{s, position})
}
  1553  
// errors matches the reported errors err against the recorded expectations
// for fname.  Only errors whose filename is relative are considered.  With
// syntaxOnly set, extra errors still fail the test but unmatched and
// missing expectations are ignored (the parser alone cannot produce the
// semantic errors most expectations describe).
func (e errchks) errors(t *testing.T, err scanner.ErrorList, fname string, syntaxOnly bool) {
	err.Sort()
	if *oRE != "" {
		for _, v := range e {
			t.Log(v)
		}
		if len(e) != 0 {
			t.Logf("FAIL\n%s", errString(err))
		}
	}

	// Group the reported errors by line; errors in absolute paths belong to
	// other files and are skipped.
	got := map[int][]*scanner.Error{}
	var gota []int
	for _, v := range err {
		p := filepath.ToSlash(v.Pos.Filename)
		if !filepath.IsAbs(p) {
			line := v.Pos.Line
			got[line] = append(got[line], v)
			gota = append(gota, line)
		}
	}
	gota = gota[:sortutil.Dedupe(sort.IntSlice(gota))]

	// Index the expectations by line.
	expect := map[int]errchk{}
	for _, v := range e {
		expect[v.pos.Line] = v
	}

	var a scanner.ErrorList
	var fail bool
outer:
	for _, line := range gota {
		matched := false
		var g0, g *scanner.Error
		var e errchk
	inner:
		for _, g = range got[line] {
			if g0 == nil {
				g0 = g
			}
			var ok bool
			if e, ok = expect[line]; !ok {
				// Error reported on a line with no expectation.
				a = append(a, &scanner.Error{Pos: g.Pos, Msg: fmt.Sprintf("[FAIL errorcheck: extra error] %s", quoteErrMessage(g.Msg))})
				fail = true
				continue outer
			}

			// An expectation may hold several quoted regexps; any match
			// accepts the error.
			for _, v := range errCheckPatterns.FindAllSubmatch(e.re, -1) {
				re := v[1]
				ok, err := regexp.MatchString(string(re), strings.SplitN(g.Error(), ": ", 2)[1])
				if err != nil {
					t.Fatal(err)
				}

				if ok {
					if !syntaxOnly {
						a = append(a, &scanner.Error{Pos: g.Pos, Msg: fmt.Sprintf("[PASS errorcheck] %s: %s", e.re, quoteErrMessage(g.Msg))})
					}
					matched = true
					break inner
				}
			}
		}
		if !matched && !syntaxOnly {
			a = append(a, &scanner.Error{Pos: g.Pos, Msg: fmt.Sprintf("[FAIL errorcheck: error does not match] %s: %s", e.re, quoteErrMessage(g0.Msg))})
			fail = true
		}
		delete(expect, line)
	}
	if !fail && (len(expect) == 0 || syntaxOnly) {
		//t.Logf("[PASS errorcheck] %v\n", fname)
	}
	if !syntaxOnly {
		// Whatever is left in expect was never reported.
		for _, e := range expect {
			a = append(a, &scanner.Error{Pos: gotoken.Position(e.pos), Msg: fmt.Sprintf("[FAIL errorcheck: missing error] %s", e.re)})
		}
	}
	a.Sort()
	if len(a) != 0 {
		t.Fatalf("\n%s", errString(a))
	}
}
  1636  
// Verify no syntax errors are reported for lines w/o the magic [GC_]ERROR
// comments.
func testParserErrchk(t *testing.T) {
	ctx, err := newTestContext()
	if err != nil {
		t.Fatal(err)
	}

	var (
		checks errchks
		errors scanner.ErrorList
		l      = newLexer(nil, nil)
	)

	ctx.tweaks.ignoreImports = true
	// Collect errorcheck expectations from comments as they are scanned.
	l.commentHandler = func(off int, lit []byte) {
		checks.comment(l.position(off), lit)
	}
	for _, fn := range errchkFiles {
		src, err := ioutil.ReadFile(fn)
		if err != nil {
			t.Fatal(err)
		}

		l.init(token.NewFile(fn, len(src)), src)
		pkg := newPackage(ctx, "", "", newErrorList(-1, false))
		sf := newSourceFile(pkg, fn, nil, nil)
		sf.File = l.file
		l.sourceFile = sf
		p := newParser(sf, l)
		p.syntaxError = func(*parser) {
			// Whitelist cases like
			//	testdata/errchk/gc/syntax/semi1.go:14:2: [FAIL errorcheck: extra error] syntax error, lookahead "EOF"=""
			if p.c == gotoken.EOF || p.l.off+1 >= len(p.l.src)-1 {
				return
			}

			errors.Add(gotoken.Position(p.position()), fmt.Sprintf("syntax error, lookahead %q=%q, p.l.off %d/%d", p.c, p.l.lit, p.l.off, len(p.l.src)))
		}
		errors = errors[:0]
		checks = checks[:0]
		p.file()
		// Drain the lexer so the comment handler sees trailing comments.
		for l.c != classEOF {
			l.Scan()
		}
		checks.errors(t, errors, fn, true)
	}
	t.Logf("files: %v", len(errchkFiles))
}
  1686  
  1687  func TestParser(t *testing.T) {
  1688  	cover := append(gorootFiles, yaccCover)
  1689  	coverPackages := append(gorootPackages, newPackage(nil, filepath.Join(selfImportPath, filepath.Dir(yaccCover)), "", nil))
  1690  	t.Run("Yacc", func(t *testing.T) { testParserYacc(t, cover) })
  1691  	t.Run("GOROOT", func(t *testing.T) { testParser(t, coverPackages) })
  1692  	t.Run("RejectFollowSet", testParserRejectFS)
  1693  	t.Run("Errchk", testParserErrchk)
  1694  }
  1695  
  1696  // https://gitlab.com/cznic/browse/issues/3
  1697  func TestBrowserIssue3(t *testing.T) {
  1698  	ctx, err := newTestContext() //"noasm")
  1699  	if err != nil {
  1700  		t.Fatal(err)
  1701  	}
  1702  
  1703  	if _, _, _, err := ctx.FilesForImportPath(token.Position{}, "github.com/gonum/matrix/mat64"); err != nil {
  1704  		return
  1705  	}
  1706  
  1707  	_, err = ctx.Load("github.com/gonum/matrix/mat64")
  1708  	if err != nil {
  1709  		t.Fatal(err)
  1710  	}
  1711  }
  1712  
  1713  func TestPrecedence(t *testing.T) {
  1714  	ctx, err := newTestContext()
  1715  	if err != nil {
  1716  		t.Fatal(err)
  1717  		return
  1718  	}
  1719  
  1720  	p, err := ctx.Load(filepath.Join(selfImportPath, "testdata", "parser", "precedence"))
  1721  	if err != nil {
  1722  		t.Fatal(err)
  1723  	}
  1724  
  1725  	x := p.Scope.Bindings["main"]
  1726  	if g, e := strings.TrimSpace(pretty2(x)), strings.TrimSpace(`
  1727  &gc.FuncDecl{
  1728  · Body: &gc.StmtBlock{
  1729  · · List: []*gc.SimpleStmtAssignment{ // len 4
  1730  · · · 0: &gc.SimpleStmtAssignment{
  1731  · · · · LHS: []*gc.PrimaryExprIdent{ // len 1
  1732  · · · · · 0: &gc.PrimaryExprIdent{
  1733  · · · · · · Ident: precedence.go:4:2: "_",
  1734  · · · · · },
  1735  · · · · },
  1736  · · · · RHS: []*gc.PrimaryExprIntLiteral{ // len 1
  1737  · · · · · 0: &gc.PrimaryExprIntLiteral{
  1738  · · · · · · Literal: precedence.go:4:6: "1",
  1739  · · · · · },
  1740  · · · · },
  1741  · · · },
  1742  · · · 1: &gc.SimpleStmtAssignment{
  1743  · · · · LHS: []*gc.PrimaryExprIdent{ // len 1
  1744  · · · · · 0: &gc.PrimaryExprIdent{
  1745  · · · · · · Ident: precedence.go:15:2: "_",
  1746  · · · · · },
  1747  · · · · },
  1748  · · · · RHS: []*gc.ExprADD{ // len 1
  1749  · · · · · 0: &gc.ExprADD{
  1750  · · · · · · LHS: &gc.PrimaryExprIntLiteral{
  1751  · · · · · · · Literal: precedence.go:15:6: "1",
  1752  · · · · · · },
  1753  · · · · · · RHS: &gc.PrimaryExprIntLiteral{
  1754  · · · · · · · Literal: precedence.go:15:10: "2",
  1755  · · · · · · },
  1756  · · · · · },
  1757  · · · · },
  1758  · · · },
  1759  · · · 2: &gc.SimpleStmtAssignment{
  1760  · · · · LHS: []*gc.PrimaryExprIdent{ // len 1
  1761  · · · · · 0: &gc.PrimaryExprIdent{
  1762  · · · · · · Ident: precedence.go:29:2: "_",
  1763  · · · · · },
  1764  · · · · },
  1765  · · · · RHS: []*gc.ExprADD{ // len 1
  1766  · · · · · 0: &gc.ExprADD{
  1767  · · · · · · LHS: &gc.PrimaryExprIntLiteral{
  1768  · · · · · · · Literal: precedence.go:29:6: "1",
  1769  · · · · · · },
  1770  · · · · · · RHS: &gc.ExprMUL{
  1771  · · · · · · · LHS: &gc.PrimaryExprIntLiteral{
  1772  · · · · · · · · Literal: precedence.go:29:10: "2",
  1773  · · · · · · · },
  1774  · · · · · · · RHS: &gc.PrimaryExprIntLiteral{
  1775  · · · · · · · · Literal: precedence.go:29:12: "3",
  1776  · · · · · · · },
  1777  · · · · · · },
  1778  · · · · · },
  1779  · · · · },
  1780  · · · },
  1781  · · · 3: &gc.SimpleStmtAssignment{
  1782  · · · · LHS: []*gc.PrimaryExprIdent{ // len 1
  1783  · · · · · 0: &gc.PrimaryExprIdent{
  1784  · · · · · · Ident: precedence.go:49:2: "_",
  1785  · · · · · },
  1786  · · · · },
  1787  · · · · RHS: []*gc.ExprADD{ // len 1
  1788  · · · · · 0: &gc.ExprADD{
  1789  · · · · · · LHS: &gc.ExprMUL{
  1790  · · · · · · · LHS: &gc.PrimaryExprIntLiteral{
  1791  · · · · · · · · Literal: precedence.go:49:6: "1",
  1792  · · · · · · · },
  1793  · · · · · · · RHS: &gc.PrimaryExprIntLiteral{
  1794  · · · · · · · · Literal: precedence.go:49:8: "2",
  1795  · · · · · · · },
  1796  · · · · · · },
  1797  · · · · · · RHS: &gc.PrimaryExprIntLiteral{
  1798  · · · · · · · Literal: precedence.go:49:12: "3",
  1799  · · · · · · },
  1800  · · · · · },
  1801  · · · · },
  1802  · · · },
  1803  · · },
  1804  · },
  1805  }
  1806  `); g != e {
  1807  		t.Fatalf("got\n%s\nexp\n%s", g, e)
  1808  	}
  1809  }