github.com/gnolang/gno@v0.0.0-20240520182011-228e9d0192ce/gnovm/pkg/gnomod/read.go

     1  // Copyright 2018 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
      3  // license that can be found at [1].
     4  //
     5  // [1]: https://cs.opensource.google/go/x/mod/+/master:LICENSE
     6  //
     7  // Mostly copied and modified from:
     8  // - golang.org/x/mod/modfile/read.go
     9  // - golang.org/x/mod/modfile/rule.go
    10  
    11  package gnomod
    12  
    13  import (
    14  	"bytes"
    15  	"errors"
    16  	"fmt"
    17  	"os"
    18  	"path/filepath"
    19  	"regexp"
    20  	"strconv"
    21  	"strings"
    22  	"unicode"
    23  	"unicode/utf8"
    24  
    25  	"golang.org/x/mod/modfile"
    26  	"golang.org/x/mod/module"
    27  )
    28  
    29  // An input represents a single input file being parsed.
    30  type input struct {
    31  	// Lexing state.
    32  	filename   string            // name of input file, for errors
    33  	complete   []byte            // entire input
    34  	remaining  []byte            // remaining input
    35  	tokenStart []byte            // token being scanned to end of input
    36  	token      token             // next token to be returned by lex, peek
    37  	pos        modfile.Position  // current input position
    38  	comments   []modfile.Comment // accumulated comments
    39  
    40  	// Parser state.
    41  	file        *modfile.FileSyntax // returned top-level syntax tree
    42  	parseErrors modfile.ErrorList   // errors encountered during parsing
    43  
    44  	// Comment assignment state.
    45  	pre  []modfile.Expr // all expressions, in preorder traversal
    46  	post []modfile.Expr // all expressions, in postorder traversal
    47  }
    48  
    49  func newInput(filename string, data []byte) *input {
    50  	return &input{
    51  		filename:  filename,
    52  		complete:  data,
    53  		remaining: data,
    54  		pos:       modfile.Position{Line: 1, LineRune: 1, Byte: 0},
    55  	}
    56  }
    57  
    58  // parse parses the input file.
    59  func parse(file string, data []byte) (f *modfile.FileSyntax, err error) {
    60  	// The parser panics for both routine errors like syntax errors
    61  	// and for programmer bugs like array index errors.
    62  	// Turn both into error returns. Catching bug panics is
    63  	// especially important when processing many files.
    64  	in := newInput(file, data)
    65  
    66  	defer func() {
    67  		if e := recover(); e != nil && e != &in.parseErrors {
    68  			in.parseErrors = append(in.parseErrors, modfile.Error{
    69  				Filename: in.filename,
    70  				Pos:      in.pos,
    71  				Err:      fmt.Errorf("internal error: %v", e),
    72  			})
    73  		}
    74  		if err == nil && len(in.parseErrors) > 0 {
    75  			err = in.parseErrors
    76  		}
    77  	}()
    78  
    79  	// Prime the lexer by reading in the first token. It will be available
    80  	// in the next peek() or lex() call.
    81  	in.readToken()
    82  
    83  	// Invoke the parser.
    84  	in.parseFile()
    85  	if len(in.parseErrors) > 0 {
    86  		return nil, in.parseErrors
    87  	}
    88  	in.file.Name = in.filename
    89  
    90  	// Assign comments to nearby syntax.
    91  	in.assignComments()
    92  
    93  	return in.file, nil
    94  }
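
// Illustrative sketch, not part of the original file: how parse is typically
// driven from inside this package (the function is unexported). The module
// path and require line below are made-up example values.
func parseSketch() {
	data := []byte("module gno.land/p/demo/foo\n\nrequire gno.land/p/demo/avl v1.2.3\n")
	f, err := parse("gno.mod", data)
	if err != nil {
		panic(err)
	}
	for _, stmt := range f.Stmt {
		if line, ok := stmt.(*modfile.Line); ok {
			fmt.Println(strings.Join(line.Token, " ")) // e.g. "module gno.land/p/demo/foo"
		}
	}
}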
    95  
    96  // Error is called to report an error.
    97  // Error does not return: it panics.
    98  func (in *input) Error(s string) {
    99  	in.parseErrors = append(in.parseErrors, modfile.Error{
   100  		Filename: in.filename,
   101  		Pos:      in.pos,
   102  		Err:      errors.New(s),
   103  	})
   104  	panic(&in.parseErrors)
   105  }
   106  
   107  // eof reports whether the input has reached end of file.
   108  func (in *input) eof() bool {
   109  	return len(in.remaining) == 0
   110  }
   111  
   112  // peekRune returns the next rune in the input without consuming it.
   113  func (in *input) peekRune() int {
   114  	if len(in.remaining) == 0 {
   115  		return 0
   116  	}
   117  	r, _ := utf8.DecodeRune(in.remaining)
   118  	return int(r)
   119  }
   120  
   121  // peekPrefix reports whether the remaining input begins with the given prefix.
   122  func (in *input) peekPrefix(prefix string) bool {
   123  	// This is like bytes.HasPrefix(in.remaining, []byte(prefix))
   124  	// but without the allocation of the []byte copy of prefix.
   125  	for i := 0; i < len(prefix); i++ {
   126  		if i >= len(in.remaining) || in.remaining[i] != prefix[i] {
   127  			return false
   128  		}
   129  	}
   130  	return true
   131  }
   132  
   133  // readRune consumes and returns the next rune in the input.
   134  func (in *input) readRune() int {
   135  	if len(in.remaining) == 0 {
   136  		in.Error("internal lexer error: readRune at EOF")
   137  	}
   138  	r, size := utf8.DecodeRune(in.remaining)
   139  	in.remaining = in.remaining[size:]
   140  	if r == '\n' {
   141  		in.pos.Line++
   142  		in.pos.LineRune = 1
   143  	} else {
   144  		in.pos.LineRune++
   145  	}
   146  	in.pos.Byte += size
   147  	return int(r)
   148  }
   149  
   150  type token struct {
   151  	kind   tokenKind
   152  	pos    modfile.Position
   153  	endPos modfile.Position
   154  	text   string
   155  }
   156  
   157  type tokenKind int
   158  
   159  const (
   160  	_EOF tokenKind = -(iota + 1)
   161  	_EOLCOMMENT
   162  	_IDENT
   163  	_STRING
   164  	_COMMENT
   165  
   166  	// newlines and punctuation tokens are allowed as ASCII codes.
   167  )
   168  
   169  func (k tokenKind) isComment() bool {
   170  	return k == _COMMENT || k == _EOLCOMMENT
   171  }
   172  
   173  // isEOL returns whether a token terminates a line.
   174  func (k tokenKind) isEOL() bool {
   175  	return k == _EOF || k == _EOLCOMMENT || k == '\n'
   176  }
   177  
   178  // startToken marks the beginning of the next input token.
   179  // It must be followed by a call to endToken, once the token's text has
   180  // been consumed using readRune.
   181  func (in *input) startToken() {
   182  	in.tokenStart = in.remaining
   183  	in.token.text = ""
   184  	in.token.pos = in.pos
   185  }
   186  
   187  // endToken marks the end of an input token.
   188  // It records the actual token string in tok.text.
   189  // A single trailing newline (LF or CRLF) will be removed from comment tokens.
   190  func (in *input) endToken(kind tokenKind) {
   191  	in.token.kind = kind
   192  	text := string(in.tokenStart[:len(in.tokenStart)-len(in.remaining)])
   193  	if kind.isComment() {
   194  		if strings.HasSuffix(text, "\r\n") {
   195  			text = text[:len(text)-2]
   196  		} else {
   197  			text = strings.TrimSuffix(text, "\n")
   198  		}
   199  	}
   200  	in.token.text = text
   201  	in.token.endPos = in.pos
   202  }
   203  
    204  // peek returns the kind of the next token returned by lex.
   205  func (in *input) peek() tokenKind {
   206  	return in.token.kind
   207  }
   208  
   209  // lex is called from the parser to obtain the next input token.
   210  func (in *input) lex() token {
   211  	tok := in.token
   212  	in.readToken()
   213  	return tok
   214  }
   215  
   216  // readToken lexes the next token from the text and stores it in in.token.
   217  func (in *input) readToken() {
   218  	// Skip past spaces, stopping at non-space or EOF.
   219  	for !in.eof() {
   220  		c := in.peekRune()
   221  		if c == ' ' || c == '\t' || c == '\r' {
   222  			in.readRune()
   223  			continue
   224  		}
   225  
   226  		// Comment runs to end of line.
   227  		if in.peekPrefix("//") {
   228  			in.startToken()
   229  
   230  			// Is this comment the only thing on its line?
   231  			// Find the last \n before this // and see if it's all
   232  			// spaces from there to here.
   233  			i := bytes.LastIndex(in.complete[:in.pos.Byte], []byte("\n"))
   234  			suffix := len(bytes.TrimSpace(in.complete[i+1:in.pos.Byte])) > 0
   235  			in.readRune()
   236  			in.readRune()
   237  
   238  			// Consume comment.
   239  			for len(in.remaining) > 0 && in.readRune() != '\n' {
   240  			}
   241  
   242  			// If we are at top level (not in a statement), hand the comment to
   243  			// the parser as a _COMMENT token. The grammar is written
   244  			// to handle top-level comments itself.
   245  			if !suffix {
   246  				in.endToken(_COMMENT)
   247  				return
   248  			}
   249  
   250  			// Otherwise, save comment for later attachment to syntax tree.
   251  			in.endToken(_EOLCOMMENT)
    252  			in.comments = append(in.comments, modfile.Comment{Start: in.token.pos, Token: in.token.text, Suffix: suffix})
   253  			return
   254  		}
   255  
   256  		if in.peekPrefix("/*") {
   257  			in.Error("mod files must use // comments (not /* */ comments)")
   258  		}
   259  
   260  		// Found non-space non-comment.
   261  		break
   262  	}
   263  
   264  	// Found the beginning of the next token.
   265  	in.startToken()
   266  
   267  	// End of file.
   268  	if in.eof() {
   269  		in.endToken(_EOF)
   270  		return
   271  	}
   272  
   273  	// Punctuation tokens.
   274  	switch c := in.peekRune(); c {
   275  	case '\n', '(', ')', '[', ']', '{', '}', ',':
   276  		in.readRune()
   277  		in.endToken(tokenKind(c))
   278  		return
   279  
   280  	case '"', '`': // quoted string
   281  		quote := c
   282  		in.readRune()
   283  		for {
   284  			if in.eof() {
   285  				in.pos = in.token.pos
   286  				in.Error("unexpected EOF in string")
   287  			}
   288  			if in.peekRune() == '\n' {
   289  				in.Error("unexpected newline in string")
   290  			}
   291  			c := in.readRune()
   292  			if c == quote {
   293  				break
   294  			}
   295  			if c == '\\' && quote != '`' {
   296  				if in.eof() {
   297  					in.pos = in.token.pos
   298  					in.Error("unexpected EOF in string")
   299  				}
   300  				in.readRune()
   301  			}
   302  		}
   303  		in.endToken(_STRING)
   304  		return
   305  	}
   306  
   307  	// Checked all punctuation. Must be identifier token.
   308  	if c := in.peekRune(); !isIdent(c) {
   309  		in.Error(fmt.Sprintf("unexpected input character %#q", c))
   310  	}
   311  
   312  	// Scan over identifier.
   313  	for isIdent(in.peekRune()) {
   314  		if in.peekPrefix("//") {
   315  			break
   316  		}
   317  		if in.peekPrefix("/*") {
   318  			in.Error("mod files must use // comments (not /* */ comments)")
   319  		}
   320  		in.readRune()
   321  	}
   322  	in.endToken(_IDENT)
   323  }
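
// Illustrative sketch, not part of the original file: driving the lexer by
// hand. readToken primes the first token; each lex call returns the current
// token and reads the next. The input below is a made-up example line.
func lexSketch() {
	in := newInput("gno.mod", []byte("module foo // note\n"))
	in.readToken()
	for {
		tok := in.lex()
		if tok.kind == _EOF {
			break
		}
		fmt.Printf("%q\n", tok.text) // "module", "foo", "// note"
	}
}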
   324  
   325  // isIdent reports whether c is an identifier rune.
   326  // We treat most printable runes as identifier runes, except for a handful of
   327  // ASCII punctuation characters.
   328  func isIdent(c int) bool {
   329  	switch r := rune(c); r {
   330  	case ' ', '(', ')', '[', ']', '{', '}', ',':
   331  		return false
   332  	default:
   333  		return !unicode.IsSpace(r) && unicode.IsPrint(r)
   334  	}
   335  }
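
// Illustrative sketch, not part of the original file: a few classifications
// isIdent makes. Path-like runes count as identifier runes; whitespace and
// grouping punctuation do not.
func isIdentSketch() {
	fmt.Println(isIdent('a')) // true
	fmt.Println(isIdent('/')) // true: slashes appear inside module paths
	fmt.Println(isIdent('(')) // false: grouping punctuation
	fmt.Println(isIdent(' ')) // false: whitespace
}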
   336  
   337  // Comment assignment.
   338  // We build two lists of all subexpressions, preorder and postorder.
   339  // The preorder list is ordered by start location, with outer expressions first.
   340  // The postorder list is ordered by end location, with outer expressions last.
   341  // We use the preorder list to assign each whole-line comment to the syntax
   342  // immediately following it, and we use the postorder list to assign each
   343  // end-of-line comment to the syntax immediately preceding it.
   344  
   345  // order walks the expression adding it and its subexpressions to the
   346  // preorder and postorder lists.
   347  func (in *input) order(x modfile.Expr) {
   348  	if x != nil {
   349  		in.pre = append(in.pre, x)
   350  	}
   351  	switch x := x.(type) {
   352  	default:
   353  		panic(fmt.Errorf("order: unexpected type %T", x))
   354  	case nil:
   355  		// nothing
   356  	case *modfile.LParen, *modfile.RParen:
   357  		// nothing
   358  	case *modfile.CommentBlock:
   359  		// nothing
   360  	case *modfile.Line:
   361  		// nothing
   362  	case *modfile.FileSyntax:
   363  		for _, stmt := range x.Stmt {
   364  			in.order(stmt)
   365  		}
   366  	case *modfile.LineBlock:
   367  		in.order(&x.LParen)
   368  		for _, l := range x.Line {
   369  			in.order(l)
   370  		}
   371  		in.order(&x.RParen)
   372  	}
   373  	if x != nil {
   374  		in.post = append(in.post, x)
   375  	}
   376  }
   377  
   378  // assignComments attaches comments to nearby syntax.
   379  func (in *input) assignComments() {
   380  	const debug = false
   381  
   382  	// Generate preorder and postorder lists.
   383  	in.order(in.file)
   384  
   385  	// Split into whole-line comments and suffix comments.
   386  	var line, suffix []modfile.Comment
   387  	for _, com := range in.comments {
   388  		if com.Suffix {
   389  			suffix = append(suffix, com)
   390  		} else {
   391  			line = append(line, com)
   392  		}
   393  	}
   394  
   395  	if debug {
   396  		for _, c := range line {
   397  			fmt.Fprintf(os.Stderr, "LINE %q :%d:%d #%d\n", c.Token, c.Start.Line, c.Start.LineRune, c.Start.Byte)
   398  		}
   399  	}
   400  
   401  	// Assign line comments to syntax immediately following.
   402  	for _, x := range in.pre {
   403  		start, _ := x.Span()
   404  		if debug {
   405  			fmt.Fprintf(os.Stderr, "pre %T :%d:%d #%d\n", x, start.Line, start.LineRune, start.Byte)
   406  		}
   407  		xcom := x.Comment()
   408  		for len(line) > 0 && start.Byte >= line[0].Start.Byte {
   409  			if debug {
   410  				fmt.Fprintf(os.Stderr, "ASSIGN LINE %q #%d\n", line[0].Token, line[0].Start.Byte)
   411  			}
   412  			xcom.Before = append(xcom.Before, line[0])
   413  			line = line[1:]
   414  		}
   415  	}
   416  
   417  	// Remaining line comments go at end of file.
   418  	in.file.After = append(in.file.After, line...)
   419  
   420  	if debug {
   421  		for _, c := range suffix {
   422  			fmt.Fprintf(os.Stderr, "SUFFIX %q :%d:%d #%d\n", c.Token, c.Start.Line, c.Start.LineRune, c.Start.Byte)
   423  		}
   424  	}
   425  
   426  	// Assign suffix comments to syntax immediately before.
   427  	for i := len(in.post) - 1; i >= 0; i-- {
   428  		x := in.post[i]
   429  
   430  		start, end := x.Span()
   431  		if debug {
   432  			fmt.Fprintf(os.Stderr, "post %T :%d:%d #%d :%d:%d #%d\n", x, start.Line, start.LineRune, start.Byte, end.Line, end.LineRune, end.Byte)
   433  		}
   434  
   435  		// Do not assign suffix comments to end of line block or whole file.
   436  		// Instead assign them to the last element inside.
   437  		switch x.(type) {
   438  		case *modfile.FileSyntax:
   439  			continue
   440  		}
   441  
   442  		// Do not assign suffix comments to something that starts
   443  		// on an earlier line, so that in
   444  		//
   445  		//	x ( y
   446  		//		z ) // comment
   447  		//
   448  		// we assign the comment to z and not to x ( ... ).
   449  		if start.Line != end.Line {
   450  			continue
   451  		}
   452  		xcom := x.Comment()
   453  		for len(suffix) > 0 && end.Byte <= suffix[len(suffix)-1].Start.Byte {
   454  			if debug {
   455  				fmt.Fprintf(os.Stderr, "ASSIGN SUFFIX %q #%d\n", suffix[len(suffix)-1].Token, suffix[len(suffix)-1].Start.Byte)
   456  			}
   457  			xcom.Suffix = append(xcom.Suffix, suffix[len(suffix)-1])
   458  			suffix = suffix[:len(suffix)-1]
   459  		}
   460  	}
   461  
   462  	// We assigned suffix comments in reverse.
   463  	// If multiple suffix comments were appended to the same
   464  	// expression node, they are now in reverse. Fix that.
   465  	for _, x := range in.post {
   466  		reverseComments(x.Comment().Suffix)
   467  	}
   468  
   469  	// Remaining suffix comments go at beginning of file.
   470  	in.file.Before = append(in.file.Before, suffix...)
   471  }
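
// Illustrative sketch, not part of the original file: where the two comment
// kinds end up after parsing. The whole-line comment is attached as a Before
// comment of the statement below it; the end-of-line comment becomes a Suffix
// comment on the same line (the latter is assignComments' doing).
func commentAssignmentSketch() {
	src := []byte("// whole-line comment\nmodule gno.land/p/demo/foo // suffix comment\n")
	f, err := parse("gno.mod", src)
	if err != nil {
		panic(err)
	}
	c := f.Stmt[0].Comment()
	fmt.Println(len(c.Before), len(c.Suffix)) // 1 1
}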
   472  
   473  // reverseComments reverses the []Comment list.
   474  func reverseComments(list []modfile.Comment) {
   475  	for i, j := 0, len(list)-1; i < j; i, j = i+1, j-1 {
   476  		list[i], list[j] = list[j], list[i]
   477  	}
   478  }
   479  
   480  func (in *input) parseFile() {
   481  	in.file = new(modfile.FileSyntax)
   482  	var cb *modfile.CommentBlock
   483  	for {
   484  		switch in.peek() {
   485  		case '\n':
   486  			in.lex()
   487  			if cb != nil {
   488  				in.file.Stmt = append(in.file.Stmt, cb)
   489  				cb = nil
   490  			}
   491  		case _COMMENT:
   492  			tok := in.lex()
   493  			if cb == nil {
   494  				cb = &modfile.CommentBlock{Start: tok.pos}
   495  			}
   496  			com := cb.Comment()
   497  			com.Before = append(com.Before, modfile.Comment{Start: tok.pos, Token: tok.text})
   498  		case _EOF:
   499  			if cb != nil {
   500  				in.file.Stmt = append(in.file.Stmt, cb)
   501  			}
   502  			return
   503  		default:
   504  			in.parseStmt()
   505  			if cb != nil {
   506  				in.file.Stmt[len(in.file.Stmt)-1].Comment().Before = cb.Before
   507  				cb = nil
   508  			}
   509  		}
   510  	}
   511  }
   512  
   513  func (in *input) parseStmt() {
   514  	tok := in.lex()
   515  	start := tok.pos
   516  	end := tok.endPos
   517  	tokens := []string{tok.text}
   518  	for {
   519  		tok := in.lex()
   520  		switch {
   521  		case tok.kind.isEOL():
   522  			in.file.Stmt = append(in.file.Stmt, &modfile.Line{
   523  				Start: start,
   524  				Token: tokens,
   525  				End:   end,
   526  			})
   527  			return
   528  
   529  		case tok.kind == '(':
   530  			if next := in.peek(); next.isEOL() {
   531  				// Start of block: no more tokens on this line.
   532  				in.file.Stmt = append(in.file.Stmt, in.parseLineBlock(start, tokens, tok))
   533  				return
   534  			} else if next == ')' {
   535  				rparen := in.lex()
   536  				if in.peek().isEOL() {
   537  					// Empty block.
   538  					in.lex()
   539  					in.file.Stmt = append(in.file.Stmt, &modfile.LineBlock{
   540  						Start:  start,
   541  						Token:  tokens,
   542  						LParen: modfile.LParen{Pos: tok.pos},
   543  						RParen: modfile.RParen{Pos: rparen.pos},
   544  					})
   545  					return
   546  				}
   547  				// '( )' in the middle of the line, not a block.
   548  				tokens = append(tokens, tok.text, rparen.text)
   549  			} else {
   550  				// '(' in the middle of the line, not a block.
   551  				tokens = append(tokens, tok.text)
   552  			}
   553  
   554  		default:
   555  			tokens = append(tokens, tok.text)
   556  			end = tok.endPos
   557  		}
   558  	}
   559  }
   560  
   561  func (in *input) parseLineBlock(start modfile.Position, token []string, lparen token) *modfile.LineBlock {
   562  	x := &modfile.LineBlock{
   563  		Start:  start,
   564  		Token:  token,
   565  		LParen: modfile.LParen{Pos: lparen.pos},
   566  	}
   567  	var comments []modfile.Comment
   568  	for {
   569  		switch in.peek() {
   570  		case _EOLCOMMENT:
   571  			// Suffix comment, will be attached later by assignComments.
   572  			in.lex()
   573  		case '\n':
   574  			// Blank line. Add an empty comment to preserve it.
   575  			in.lex()
   576  			if len(comments) == 0 && len(x.Line) > 0 || len(comments) > 0 && comments[len(comments)-1].Token != "" {
   577  				comments = append(comments, modfile.Comment{})
   578  			}
   579  		case _COMMENT:
   580  			tok := in.lex()
   581  			comments = append(comments, modfile.Comment{Start: tok.pos, Token: tok.text})
   582  		case _EOF:
   583  			in.Error(fmt.Sprintf("syntax error (unterminated block started at %s:%d:%d)", in.filename, x.Start.Line, x.Start.LineRune))
   584  		case ')':
   585  			rparen := in.lex()
   586  			x.RParen.Before = comments
   587  			x.RParen.Pos = rparen.pos
   588  			if !in.peek().isEOL() {
   589  				in.Error("syntax error (expected newline after closing paren)")
   590  			}
   591  			in.lex()
   592  			return x
   593  		default:
   594  			l := in.parseLine()
   595  			x.Line = append(x.Line, l)
   596  			l.Comment().Before = comments
   597  			comments = nil
   598  		}
   599  	}
   600  }
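
// Illustrative sketch, not part of the original file: a parenthesized block
// parses into a single LineBlock whose inner lines omit the leading verb.
func lineBlockSketch() {
	f, err := parse("gno.mod", []byte("require (\n\tgno.land/p/demo/avl v1.0.0\n)\n"))
	if err != nil {
		panic(err)
	}
	block := f.Stmt[0].(*modfile.LineBlock)
	fmt.Println(strings.Join(block.Token, " "), len(block.Line)) // require 1
	fmt.Println(strings.Join(block.Line[0].Token, " "))          // gno.land/p/demo/avl v1.0.0
}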
   601  
   602  func (in *input) parseLine() *modfile.Line {
   603  	tok := in.lex()
   604  	if tok.kind.isEOL() {
   605  		in.Error("internal parse error: parseLine at end of line")
   606  	}
   607  	start := tok.pos
   608  	end := tok.endPos
   609  	tokens := []string{tok.text}
   610  	for {
   611  		tok := in.lex()
   612  		if tok.kind.isEOL() {
   613  			return &modfile.Line{
   614  				Start:   start,
   615  				Token:   tokens,
   616  				End:     end,
   617  				InBlock: true,
   618  			}
   619  		}
   620  		tokens = append(tokens, tok.text)
   621  		end = tok.endPos
   622  	}
   623  }
   624  
   625  var (
   626  	slashSlash = []byte("//")
   627  	moduleStr  = []byte("module")
   628  )
   629  
    630  // ModulePath returns the module path from the gno.mod file text.
    631  // If it cannot find a module path, it returns an empty string.
    632  // It is tolerant of unrelated problems in the gno.mod file.
   633  func ModulePath(mod []byte) string {
   634  	for len(mod) > 0 {
   635  		line := mod
   636  		mod = nil
   637  		if i := bytes.IndexByte(line, '\n'); i >= 0 {
   638  			line, mod = line[:i], line[i+1:]
   639  		}
   640  		if i := bytes.Index(line, slashSlash); i >= 0 {
   641  			line = line[:i]
   642  		}
   643  		line = bytes.TrimSpace(line)
   644  		if !bytes.HasPrefix(line, moduleStr) {
   645  			continue
   646  		}
   647  		line = line[len(moduleStr):]
   648  		n := len(line)
   649  		line = bytes.TrimSpace(line)
   650  		if len(line) == n || len(line) == 0 {
   651  			continue
   652  		}
   653  
   654  		if line[0] == '"' || line[0] == '`' {
   655  			p, err := strconv.Unquote(string(line))
   656  			if err != nil {
   657  				return "" // malformed quoted string or multiline module path
   658  			}
   659  			return p
   660  		}
   661  
   662  		return string(line)
   663  	}
   664  	return "" // missing module path
   665  }
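
// Illustrative sketch, not part of the original file: ModulePath only scans
// for the first "module" line, so it works even on files that would not fully
// parse. The paths below are made-up example values.
func modulePathSketch() {
	fmt.Println(ModulePath([]byte("module gno.land/p/demo/foo\n")))         // gno.land/p/demo/foo
	fmt.Println(ModulePath([]byte(`module "gno.land/p/demo/foo"`)))         // gno.land/p/demo/foo (quoted form)
	fmt.Printf("%q\n", ModulePath([]byte("// no module directive here\n"))) // ""
}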
   666  
   667  func modulePathMajor(path string) (string, error) {
   668  	_, major, ok := module.SplitPathVersion(path)
   669  	if !ok {
   670  		return "", fmt.Errorf("invalid module path")
   671  	}
   672  	return major, nil
   673  }
   674  
   675  func parseString(s *string) (string, error) {
   676  	t := *s
   677  	if strings.HasPrefix(t, `"`) {
   678  		var err error
   679  		if t, err = strconv.Unquote(t); err != nil {
   680  			return "", err
   681  		}
   682  	} else if strings.ContainsAny(t, "\"'`") {
   683  		// Other quotes are reserved both for possible future expansion
   684  		// and to avoid confusion. For example if someone types 'x'
   685  		// we want that to be a syntax error and not a literal x in literal quotation marks.
   686  		return "", fmt.Errorf("unquoted string cannot contain quote")
   687  	}
   688  	*s = modfile.AutoQuote(t)
   689  	return t, nil
   690  }
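
// Illustrative sketch, not part of the original file: parseString returns the
// unquoted value and rewrites *s into the form modfile.AutoQuote prefers.
func parseStringSketch() {
	s := `"gno.land/p/demo/foo"`
	if v, err := parseString(&s); err == nil {
		fmt.Println(v, s) // gno.land/p/demo/foo gno.land/p/demo/foo (quotes dropped, not needed)
	}

	bad := "it's" // stray quote characters in unquoted strings are rejected
	if _, err := parseString(&bad); err != nil {
		fmt.Println(err) // unquoted string cannot contain quote
	}
}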
   691  
   692  func parseVersion(verb string, path string, s *string) (string, error) {
   693  	t, err := parseString(s)
   694  	if err != nil {
   695  		return "", &modfile.Error{
   696  			Verb:    verb,
   697  			ModPath: path,
   698  			Err: &module.InvalidVersionError{
   699  				Version: *s,
   700  				Err:     err,
   701  			},
   702  		}
   703  	}
   704  
   705  	cv := module.CanonicalVersion(t)
   706  	if cv == "" {
   707  		return "", &modfile.Error{
   708  			Verb:    verb,
   709  			ModPath: path,
   710  			Err: &module.InvalidVersionError{
   711  				Version: t,
   712  				Err:     errors.New("must be of the form v1.2.3"),
   713  			},
   714  		}
   715  	}
   716  
   717  	*s = cv
   718  	return *s, nil
   719  }
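
// Illustrative sketch, not part of the original file: parseVersion rewrites
// *s to the canonical semver form and rejects anything that cannot be
// canonicalized. The module path is a made-up example value.
func parseVersionSketch() {
	v := "v1.2"
	if _, err := parseVersion("require", "gno.land/p/demo/foo", &v); err == nil {
		fmt.Println(v) // v1.2.0
	}

	bad := "1.2.3" // missing the leading "v"
	if _, err := parseVersion("require", "gno.land/p/demo/foo", &bad); err != nil {
		fmt.Println("rejected:", err)
	}
}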
   720  
   721  func parseReplace(filename string, line *modfile.Line, verb string, args []string) (*modfile.Replace, *modfile.Error) {
   722  	wrapModPathError := func(modPath string, err error) *modfile.Error {
   723  		return &modfile.Error{
   724  			Filename: filename,
   725  			Pos:      line.Start,
   726  			ModPath:  modPath,
   727  			Verb:     verb,
   728  			Err:      err,
   729  		}
   730  	}
   731  	wrapError := func(err error) *modfile.Error {
   732  		return &modfile.Error{
   733  			Filename: filename,
   734  			Pos:      line.Start,
   735  			Err:      err,
   736  		}
   737  	}
   738  	errorf := func(format string, args ...interface{}) *modfile.Error {
   739  		return wrapError(fmt.Errorf(format, args...))
   740  	}
   741  
   742  	arrow := 2
   743  	if len(args) >= 2 && args[1] == "=>" {
   744  		arrow = 1
   745  	}
   746  	if len(args) < arrow+2 || len(args) > arrow+3 || args[arrow] != "=>" {
   747  		return nil, errorf("usage: %s module/path [v1.2.3] => other/module v1.4\n\t or %s module/path [v1.2.3] => ../local/directory", verb, verb)
   748  	}
   749  	s, err := parseString(&args[0])
   750  	if err != nil {
   751  		return nil, errorf("invalid quoted string: %v", err)
   752  	}
   753  	pathMajor, err := modulePathMajor(s)
   754  	if err != nil {
   755  		return nil, wrapModPathError(s, err)
   756  	}
   757  	var v string
   758  	if arrow == 2 {
   759  		v, err = parseVersion(verb, s, &args[1])
   760  		if err != nil {
   761  			return nil, wrapError(err)
   762  		}
   763  		if err := module.CheckPathMajor(v, pathMajor); err != nil {
   764  			return nil, wrapModPathError(s, err)
   765  		}
   766  	}
   767  	ns, err := parseString(&args[arrow+1])
   768  	if err != nil {
   769  		return nil, errorf("invalid quoted string: %v", err)
   770  	}
   771  	nv := ""
   772  	if len(args) == arrow+2 {
   773  		if !modfile.IsDirectoryPath(ns) {
   774  			if strings.Contains(ns, "@") {
   775  				return nil, errorf("replacement module must match format 'path version', not 'path@version'")
   776  			}
   777  			return nil, errorf("replacement module without version must be directory path (rooted or starting with . or ..)")
   778  		}
   779  		if filepath.Separator == '/' && strings.Contains(ns, `\`) {
   780  			return nil, errorf("replacement directory appears to be Windows path (on a non-windows system)")
   781  		}
   782  	}
   783  	if len(args) == arrow+3 {
   784  		nv, err = parseVersion(verb, ns, &args[arrow+2])
   785  		if err != nil {
   786  			return nil, wrapError(err)
   787  		}
   788  		if modfile.IsDirectoryPath(ns) {
   789  			return nil, errorf("replacement module directory path %q cannot have version", ns)
   790  		}
   791  	}
   792  	return &modfile.Replace{
   793  		Old:    module.Version{Path: s, Version: v},
   794  		New:    module.Version{Path: ns, Version: nv},
   795  		Syntax: line,
   796  	}, nil
   797  }
   798  
   799  var reDeprecation = regexp.MustCompile(`(?s)(?:^|\n\n)Deprecated: *(.*?)(?:$|\n\n)`)
   800  
    801  // parseDeprecation extracts the text of comments on a "module" directive and
    802  // returns the deprecation message contained in it, if any.
   803  //
   804  // A deprecation message is contained in a paragraph within a block of comments
   805  // that starts with "Deprecated:" (case sensitive). The message runs until the
   806  // end of the paragraph and does not include the "Deprecated:" prefix. If the
   807  // comment block has multiple paragraphs that start with "Deprecated:",
   808  // parseDeprecation returns the message from the first.
   809  func parseDeprecation(block *modfile.LineBlock, line *modfile.Line) string {
   810  	text := parseDirectiveComment(block, line)
   811  	m := reDeprecation.FindStringSubmatch(text)
   812  	if m == nil {
   813  		return ""
   814  	}
   815  	return m[1]
   816  }
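
// Illustrative sketch, not part of the original file: the paragraph shape the
// deprecation regexp matches. Only the first "Deprecated:" paragraph counts,
// and the prefix itself is not part of the returned message.
func deprecationSketch() {
	text := "This module is superseded.\n\nDeprecated: use gno.land/p/demo/bar instead.\n\nMore notes."
	fmt.Println(reDeprecation.FindStringSubmatch(text)[1]) // use gno.land/p/demo/bar instead.
}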
   817  
   818  // parseDirectiveComment extracts the text of comments on a directive.
    819  // If the directive's line does not have comments and is part of a block that
   820  // does have comments, the block's comments are used.
   821  func parseDirectiveComment(block *modfile.LineBlock, line *modfile.Line) string {
   822  	comments := line.Comment()
   823  	if block != nil && len(comments.Before) == 0 && len(comments.Suffix) == 0 {
   824  		comments = block.Comment()
   825  	}
   826  	groups := [][]modfile.Comment{comments.Before, comments.Suffix}
   827  	var lines []string
   828  	for _, g := range groups {
   829  		for _, c := range g {
   830  			if !strings.HasPrefix(c.Token, "//") {
   831  				continue // blank line
   832  			}
   833  			lines = append(lines, strings.TrimSpace(strings.TrimPrefix(c.Token, "//")))
   834  		}
   835  	}
   836  	return strings.Join(lines, "\n")
   837  }
   838  
   839  // parseDraft returns whether the module is marked as draft.
   840  func parseDraft(block *modfile.CommentBlock) bool {
   841  	if len(block.Before) != 1 {
   842  		return false
   843  	}
   844  	comment := block.Before[0]
   845  	if strings.TrimSpace(strings.TrimPrefix(comment.Token, "//")) != "Draft" {
   846  		return false
   847  	}
   848  	return true
   849  }
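
// Illustrative sketch, not part of the original file: the exact comment block
// parseDraft looks for. A file is considered a draft only when the block holds
// the single comment "// Draft".
func parseDraftSketch() {
	block := &modfile.CommentBlock{
		Comments: modfile.Comments{
			Before: []modfile.Comment{{Token: "// Draft"}},
		},
	}
	fmt.Println(parseDraft(block)) // true
}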
   850  
   851  // markLineAsRemoved modifies line so that it (and its end-of-line comment, if any)
   852  // will be dropped by (*FileSyntax).Cleanup.
   853  func markLineAsRemoved(line *modfile.Line) {
   854  	line.Token = nil
   855  	line.Comments.Suffix = nil
   856  }
   857  
   858  func updateLine(line *modfile.Line, tokens ...string) {
   859  	if line.InBlock {
   860  		tokens = tokens[1:]
   861  	}
   862  	line.Token = tokens
   863  }
   864  
    865  // setIndirect sets r.Indirect and adds or removes the "// indirect" comment on its syntax line.
   866  func setIndirect(r *modfile.Require, indirect bool) {
   867  	r.Indirect = indirect
   868  	line := r.Syntax
   869  	if isIndirect(line) == indirect {
   870  		return
   871  	}
   872  	if indirect {
   873  		// Adding comment.
   874  		if len(line.Suffix) == 0 {
   875  			// New comment.
   876  			line.Suffix = []modfile.Comment{{Token: "// indirect", Suffix: true}}
   877  			return
   878  		}
   879  
   880  		com := &line.Suffix[0]
   881  		text := strings.TrimSpace(strings.TrimPrefix(com.Token, string(slashSlash)))
   882  		if text == "" {
   883  			// Empty comment.
   884  			com.Token = "// indirect"
   885  			return
   886  		}
   887  
   888  		// Insert at beginning of existing comment.
   889  		com.Token = "// indirect; " + text
   890  		return
   891  	}
   892  
   893  	// Removing comment.
   894  	f := strings.TrimSpace(strings.TrimPrefix(line.Suffix[0].Token, string(slashSlash)))
   895  	if f == "indirect" {
   896  		// Remove whole comment.
   897  		line.Suffix = nil
   898  		return
   899  	}
   900  
   901  	// Remove comment prefix.
   902  	com := &line.Suffix[0]
   903  	i := strings.Index(com.Token, "indirect;")
   904  	com.Token = "//" + com.Token[i+len("indirect;"):]
   905  }
   906  
   907  // isIndirect reports whether line has a "// indirect" comment,
    908  // meaning it is in the gno.mod file only for its effect on indirect dependencies,
   909  // so that it can be dropped entirely once the effective version of the
   910  // indirect dependency reaches the given minimum version.
   911  func isIndirect(line *modfile.Line) bool {
   912  	if len(line.Suffix) == 0 {
   913  		return false
   914  	}
   915  	f := strings.Fields(strings.TrimPrefix(line.Suffix[0].Token, string(slashSlash)))
   916  	return (len(f) == 1 && f[0] == "indirect" || len(f) > 1 && f[0] == "indirect;")
   917  }
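
// Illustrative sketch, not part of the original file: the suffix-comment
// shapes isIndirect accepts.
func isIndirectSketch() {
	mk := func(tok string) *modfile.Line {
		return &modfile.Line{Comments: modfile.Comments{
			Suffix: []modfile.Comment{{Token: tok, Suffix: true}},
		}}
	}
	fmt.Println(isIndirect(mk("// indirect")))           // true
	fmt.Println(isIndirect(mk("// indirect; see note"))) // true
	fmt.Println(isIndirect(mk("// some other comment"))) // false
	fmt.Println(isIndirect(&modfile.Line{}))             // false: no suffix comment at all
}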
   918  
   919  // addLine adds a line containing the given tokens to the file.
   920  //
   921  // If the first token of the hint matches the first token of the
   922  // line, the new line is added at the end of the block containing hint,
   923  // extracting hint into a new block if it is not yet in one.
   924  //
    925  // If the hint is non-nil but its first token does not match,
   926  // the new line is added after the block containing hint
   927  // (or hint itself, if not in a block).
   928  //
   929  // If no hint is provided, addLine appends the line to the end of
   930  // the last block with a matching first token,
   931  // or to the end of the file if no such block exists.
   932  func addLine(x *modfile.FileSyntax, hint modfile.Expr, tokens ...string) *modfile.Line {
   933  	if hint == nil {
   934  		// If no hint given, add to the last statement of the given type.
   935  	Loop:
   936  		for i := len(x.Stmt) - 1; i >= 0; i-- {
   937  			stmt := x.Stmt[i]
   938  			switch stmt := stmt.(type) {
   939  			case *modfile.Line:
   940  				if stmt.Token != nil && stmt.Token[0] == tokens[0] {
   941  					hint = stmt
   942  					break Loop
   943  				}
   944  			case *modfile.LineBlock:
   945  				if stmt.Token[0] == tokens[0] {
   946  					hint = stmt
   947  					break Loop
   948  				}
   949  			}
   950  		}
   951  	}
   952  
   953  	newLineAfter := func(i int) *modfile.Line {
   954  		newl := &modfile.Line{Token: tokens}
   955  		if i == len(x.Stmt) {
   956  			x.Stmt = append(x.Stmt, newl)
   957  		} else {
   958  			x.Stmt = append(x.Stmt, nil)
   959  			copy(x.Stmt[i+2:], x.Stmt[i+1:])
   960  			x.Stmt[i+1] = newl
   961  		}
   962  		return newl
   963  	}
   964  
   965  	if hint != nil {
   966  		for i, stmt := range x.Stmt {
   967  			switch stmt := stmt.(type) {
   968  			case *modfile.Line:
   969  				if stmt == hint {
   970  					if stmt.Token == nil || stmt.Token[0] != tokens[0] {
   971  						return newLineAfter(i)
   972  					}
   973  
   974  					// Convert line to line block.
   975  					stmt.InBlock = true
   976  					block := &modfile.LineBlock{Token: stmt.Token[:1], Line: []*modfile.Line{stmt}}
   977  					stmt.Token = stmt.Token[1:]
   978  					x.Stmt[i] = block
   979  					newl := &modfile.Line{Token: tokens[1:], InBlock: true}
   980  					block.Line = append(block.Line, newl)
   981  					return newl
   982  				}
   983  
   984  			case *modfile.LineBlock:
   985  				if stmt == hint {
   986  					if stmt.Token[0] != tokens[0] {
   987  						return newLineAfter(i)
   988  					}
   989  
   990  					newl := &modfile.Line{Token: tokens[1:], InBlock: true}
   991  					stmt.Line = append(stmt.Line, newl)
   992  					return newl
   993  				}
   994  
   995  				for j, line := range stmt.Line {
   996  					if line == hint {
   997  						if stmt.Token[0] != tokens[0] {
   998  							return newLineAfter(i)
   999  						}
  1000  
  1001  						// Add new line after hint within the block.
  1002  						stmt.Line = append(stmt.Line, nil)
  1003  						copy(stmt.Line[j+2:], stmt.Line[j+1:])
  1004  						newl := &modfile.Line{Token: tokens[1:], InBlock: true}
  1005  						stmt.Line[j+1] = newl
  1006  						return newl
  1007  					}
  1008  				}
  1009  			}
  1010  		}
  1011  	}
  1012  
  1013  	newl := &modfile.Line{Token: tokens}
  1014  	x.Stmt = append(x.Stmt, newl)
  1015  	return newl
  1016  }
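
// Illustrative sketch, not part of the original file: with no hint, addLine
// groups the new line with the last statement whose first token matches,
// converting a single require line into a require block. Paths are made-up
// example values.
func addLineSketch() {
	f, err := parse("gno.mod", []byte("module gno.land/p/demo/foo\n\nrequire gno.land/p/demo/avl v1.0.0\n"))
	if err != nil {
		panic(err)
	}
	addLine(f, nil, "require", "gno.land/p/demo/ufmt", "v1.0.0")
	// f.Stmt[1] is now a *modfile.LineBlock with two lines:
	//   gno.land/p/demo/avl v1.0.0
	//   gno.land/p/demo/ufmt v1.0.0
}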
  1017  
  1018  func addReplace(syntax *modfile.FileSyntax, replace *[]*modfile.Replace, oldPath, oldVers, newPath, newVers string) error {
  1019  	need := true
  1020  	oldv := module.Version{Path: oldPath, Version: oldVers}
  1021  	newv := module.Version{Path: newPath, Version: newVers}
  1022  	tokens := []string{"replace", modfile.AutoQuote(oldPath)}
  1023  	if oldVers != "" {
  1024  		tokens = append(tokens, oldVers)
  1025  	}
  1026  	tokens = append(tokens, "=>", modfile.AutoQuote(newPath))
  1027  	if newVers != "" {
  1028  		tokens = append(tokens, newVers)
  1029  	}
  1030  
  1031  	var hint *modfile.Line
  1032  	for _, r := range *replace {
  1033  		if r.Old.Path == oldPath && (oldVers == "" || r.Old.Version == oldVers) {
  1034  			if need {
  1035  				// Found replacement for old; update to use new.
  1036  				r.New = newv
  1037  				updateLine(r.Syntax, tokens...)
  1038  				need = false
  1039  				continue
  1040  			}
  1041  			// Already added; delete other replacements for same.
  1042  			markLineAsRemoved(r.Syntax)
  1043  			*r = modfile.Replace{}
  1044  		}
  1045  		if r.Old.Path == oldPath {
  1046  			hint = r.Syntax
  1047  		}
  1048  	}
  1049  	if need {
  1050  		*replace = append(*replace, &modfile.Replace{Old: oldv, New: newv, Syntax: addLine(syntax, hint, tokens...)})
  1051  	}
  1052  	return nil
  1053  }
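
// Illustrative sketch, not part of the original file: addReplace either
// rewrites an existing matching replace line in place or appends a new one,
// keeping the replace slice and the syntax tree in sync. Paths are made-up
// example values.
func addReplaceSketch() {
	f, err := parse("gno.mod", []byte("module gno.land/p/demo/foo\n"))
	if err != nil {
		panic(err)
	}
	var replaces []*modfile.Replace
	_ = addReplace(f, &replaces, "gno.land/p/demo/avl", "", "../avl", "")
	fmt.Println(replaces[0].Old.Path, "=>", replaces[0].New.Path) // gno.land/p/demo/avl => ../avl
}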