github.com/shogo82148/std@v1.22.1-0.20240327122250-4e474527810c/cmd/asm/internal/lex/tokenizer.go (about)

     1  // Copyright 2015 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package lex
     6  
     7  import (
     8  	"github.com/shogo82148/std/io"
     9  	"github.com/shogo82148/std/os"
    10  	"github.com/shogo82148/std/text/scanner"
    11  
    12  	"github.com/shogo82148/std/cmd/internal/src"
    13  )
    14  
    15  // A Tokenizer is a simple wrapping of text/scanner.Scanner, configured
    16  // for our purposes and made a TokenReader. It forms the lowest level,
    17  // turning text from readers into tokens.
    18  type Tokenizer struct {
    19  	tok  ScanToken        // most recently scanned token — TODO confirm against Next
    20  	s    *scanner.Scanner // underlying text/scanner.Scanner doing the lexing
    21  	base *src.PosBase     // position base used when reporting source locations
    22  	line int              // current line number — presumably updated by Next; verify
    23  	file *os.File         // underlying file, if any; presumably released by Close
    24  }
    25  
        // NewTokenizer returns a Tokenizer that scans tokens from r, reporting
        // positions under the given file name. The *os.File argument is retained
        // on the Tokenizer (see the file field); NOTE(review): presumably kept
        // only so Close can release it — confirm against the implementation.
    26  func NewTokenizer(name string, r io.Reader, file *os.File) *Tokenizer
    27  
        // Text returns the text of the most recently scanned token.
    28  func (t *Tokenizer) Text() string
    29  
        // File returns the name of the file being tokenized — presumably the
        // name passed to NewTokenizer; confirm against the implementation.
    30  func (t *Tokenizer) File() string
    31  
        // Base returns the position base used for reporting source positions.
    32  func (t *Tokenizer) Base() *src.PosBase
    33  
        // SetBase sets the position base used for reporting source positions.
    34  func (t *Tokenizer) SetBase(base *src.PosBase)
    35  
        // Line returns the current source line number.
    36  func (t *Tokenizer) Line() int
    37  
        // Col returns the current source column.
    38  func (t *Tokenizer) Col() int
    39  
        // Next advances the input and returns the next token scanned.
    40  func (t *Tokenizer) Next() ScanToken
    41  
        // Close releases resources held by the Tokenizer; NOTE(review):
        // presumably closes the underlying *os.File when one was supplied to
        // NewTokenizer — confirm against the implementation.
    42  func (t *Tokenizer) Close()