github.com/shogo82148/std@v1.22.1-0.20240327122250-4e474527810c/cmd/asm/internal/lex/lex.go

// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package lex implements lexical analysis for the assembler.
package lex

import (
	"github.com/shogo82148/std/cmd/internal/src"
)

// A ScanToken represents an input item. It is a simple wrapping of rune, as
// returned by text/scanner.Scanner, plus a couple of extra values.
type ScanToken rune

const (
	// Asm defines some two-character lexemes. We make up
	// a rune/ScanToken value for them - ugly but simple.
	LSH ScanToken = -1000 - iota
	RSH
	ARR
	ROT
	Include
	BuildComment
)

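// The token classes defined by text/scanner (scanner.EOF, scanner.Ident, and so
// on) are small negative runes, so starting these made-up lexemes at -1000 keeps
// the two ranges from colliding. A minimal sketch of how a caller could tell
// them apart; isMadeUp is a hypothetical helper, not part of this package:
//
//	func isMadeUp(t ScanToken) bool {
//		return BuildComment <= t && t <= LSH // -1005 up to -1000
//	}
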
// IsRegisterShift reports whether the token is one of the ARM register shift operators.
func IsRegisterShift(r ScanToken) bool

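// A minimal sketch of one way IsRegisterShift could be written, relying only on
// the four shift lexemes being declared consecutively above (illustrative body,
// not necessarily the actual implementation):
//
//	func IsRegisterShift(r ScanToken) bool {
//		return ROT <= r && r <= LSH // ROT (-1003) through LSH (-1000)
//	}
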
// String returns a printable representation of the token.
func (t ScanToken) String() string

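// An illustrative sketch of the kind of mapping String performs, assuming the
// "fmt" and "text/scanner" imports (hypothetical body; the real method may
// handle more token classes):
//
//	func (t ScanToken) String() string {
//		switch t {
//		case scanner.EOF:
//			return "EOF"
//		case scanner.Ident:
//			return "identifier"
//		case scanner.Int:
//			return "integer constant"
//		case scanner.String:
//			return "string constant"
//		default:
//			return fmt.Sprintf("%q", rune(t))
//		}
//	}
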
// NewLexer returns a lexer for the named file.
func NewLexer(name string) TokenReader

// A TokenReader is like a reader, but returns lex tokens of type Token. It also can tell you what
// the text of the most recently returned token is, and where it was found.
// The underlying scanner elides all spaces except newline, so the input looks like a stream of
// Tokens; original spacing is lost but we don't need it.
type TokenReader interface {
	// Next returns the next token.
	Next() ScanToken
	// Text returns the original string representation of the most
	// recently returned token.
	Text() string
	// File reports the source file name of the current token.
	File() string
	// Base returns the position base of the current source.
	Base() *src.PosBase
	// SetBase sets the position base.
	SetBase(*src.PosBase)
	// Line reports the line number of the current token.
	Line() int
	// Col reports the column number of the current token.
	Col() int
	// Close does any teardown required.
	Close()
}

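// A minimal sketch of how a TokenReader returned by NewLexer might be consumed,
// assuming the "fmt" and "text/scanner" imports; dumpTokens is a hypothetical
// helper, not part of this package:
//
//	func dumpTokens(name string) {
//		tr := NewLexer(name)
//		defer tr.Close()
//		for tok := tr.Next(); tok != scanner.EOF; tok = tr.Next() {
//			fmt.Printf("%s:%d:%d\t%s\t%q\n", tr.File(), tr.Line(), tr.Col(), tok, tr.Text())
//		}
//	}
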
// A Token is a scan token plus its string value.
// A macro is stored as a sequence of Tokens with spaces stripped.
type Token struct {
	ScanToken
	text string
}

// Make returns a Token with the given rune (ScanToken) and text representation.
func Make(token ScanToken, text string) Token

// String returns the Token's text representation.
func (l Token) String() string

// A Macro represents the definition of a #defined macro.
type Macro struct {
	name   string   // the macro's name
	args   []string // formal parameters, in declaration order
	tokens []Token  // the macro's replacement text, as a token sequence
}

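// An illustrative sketch of how a simple #define could be represented as a
// Macro; the field values below are hand-written for illustration, not the
// output of this package:
//
//	// #define ADD4(x) ADD $4, x
//	m := Macro{
//		name:   "ADD4",
//		args:   []string{"x"},
//		tokens: Tokenize("ADD $4, x"),
//	}
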
// Tokenize turns a string into a list of Tokens; used to parse the -D flag and in tests.
func Tokenize(str string) []Token
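
// A minimal sketch of using Tokenize on a -D style definition, assuming the
// "fmt" import (the exact token boundaries shown here are only indicative):
//
//	for _, tok := range Tokenize("ADDR=0x1000") {
//		fmt.Printf("%v %q\n", tok.ScanToken, tok.String())
//	}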