github.com/boyter/gocodewalker@v1.3.2/go-gitignore/lexer_test.go

// SPDX-License-Identifier: MIT

package gitignore_test

import (
	"fmt"
	"github.com/boyter/gocodewalker/go-gitignore"
	"strings"
	"testing"
)

// TestLexerNewLine tests the behaviour of the gitignore.Lexer when the input
// data explicitly uses "\n" as the line separator
func TestLexerNewLine(t *testing.T) {
	// split the test content into lines
	//		- ensure we handle "\n" and "\r" correctly
	//		- since this test file is written on a system that uses "\n"
	//		  to designate end of line, this should be unnecessary, but it's
	//		  possible for the file line endings to be converted outside of
	//		  this repository, so we are thorough here to ensure the test
	//		  works as expected everywhere
	_content := strings.Split(_GITIGNORE, "\n")
	for _i := 0; _i < len(_content); _i++ {
		_content[_i] = strings.TrimSuffix(_content[_i], "\r")
	}

	// perform the Lexer test with input explicitly separated by "\n"
	lexer(t, _content, "\n", _GITTOKENS, nil)
} // TestLexerNewLine()

// TestLexerCarriageReturn tests the behaviour of the gitignore.Lexer when the
// input data explicitly uses "\r\n" as the line separator
func TestLexerCarriageReturn(t *testing.T) {
	// split the test content into lines
	//		- see above
	_content := strings.Split(_GITIGNORE, "\n")
	for _i := 0; _i < len(_content); _i++ {
		_content[_i] = strings.TrimSuffix(_content[_i], "\r")
	}

	// perform the Lexer test with input explicitly separated by "\r\n"
	lexer(t, _content, "\r\n", _GITTOKENS, nil)
} // TestLexerCarriageReturn()

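// TestLexerInvalidNewLine tests the behaviour of the gitignore.Lexer against
// invalid input that should raise a gitignore.CarriageReturnError, with the
// lines joined using "\n" as the line separator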
func TestLexerInvalidNewLine(t *testing.T) {
	// perform the Lexer test with invalid input separated by "\n"
	//		- the source content is manually constructed with "\n" as EOL
	_content := strings.Split(_GITINVALID, "\n")
	lexer(t, _content, "\n", _TOKENSINVALID, gitignore.CarriageReturnError)
} // TestLexerInvalidNewLine()

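// TestLexerInvalidCarriageReturn tests the behaviour of the gitignore.Lexer
// against invalid input that should raise a gitignore.CarriageReturnError,
// with the lines joined using "\r\n" as the line separator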
func TestLexerInvalidCarriageReturn(t *testing.T) {
	// perform the Lexer test with invalid input separated by "\r\n"
	//		- the source content is manually constructed with "\n" as EOL
	_content := strings.Split(_GITINVALID, "\n")
	lexer(t, _content, "\r\n", _TOKENSINVALID, gitignore.CarriageReturnError)
} // TestLexerInvalidCarriageReturn()

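// lexer is the common test harness: it joins the given lines using the given
// end-of-line sequence, feeds the result to a gitignore.Lexer, and asserts
// that the resulting token stream (and any expected error e) matches the
// expected tokens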
func lexer(t *testing.T, lines []string, eol string, tokens []token, e error) {
	// create a temporary .gitignore
	_buffer, _err := buffer(strings.Join(lines, eol))
	if _err != nil {
		t.Fatalf("unable to create temporary .gitignore: %s", _err.Error())
	}

	// ensure we have a non-nil Lexer instance
	_lexer := gitignore.NewLexer(_buffer)
	if _lexer == nil {
		t.Error("expected non-nil Lexer instance; nil found")
	}

	// ensure the stream of tokens is as we expect
	for _, _expected := range tokens {
		_position := _lexer.Position()

		// ensure the string form of the Lexer reports the correct position
		_string := fmt.Sprintf("%d:%d", _position.Line, _position.Column)
		if _lexer.String() != _string {
			t.Errorf(
				"lexer string mismatch; expected %q, got %q",
				_string, _lexer.String(),
			)
		}

		// extract the next token from the lexer
		_got, _err := _lexer.Next()

		// ensure we did not receive an unexpected error
		if _err != nil {
			// if we expect an error during processing, check to see if
			// the received error is as expected
			if _err.Underlying() != e {
				t.Fatalf(
					"unable to retrieve expected token; %s at %s",
					_err.Error(), pos(_err.Position()),
				)
			}
		}

		// did we receive a token?
		if _got == nil {
			t.Fatalf("expected token at %s; none found", _lexer)
		} else if _got.Type != _expected.Type {
			t.Fatalf(
				"token type mismatch; expected type %d, got %d [%s]",
				_expected.Type, _got.Type, _got,
			)
		} else if _got.Name() != _expected.Name {
			t.Fatalf(
				"token name mismatch; expected name %q, got %q [%s]",
				_expected.Name, _got.Name(), _got,
			)
		} else {
			// ensure the extracted token string matches expectation
			//		- we handle EOL separately, since it can change based
			//		  on the end of line sequence of the input file
			_same := _got.Token() == _expected.Token
			if _got.Type == gitignore.EOL {
				_same = _got.Token() == eol
			}
			if !_same {
				t.Fatalf(
					"token value mismatch; expected token %q, got %q [%s]",
					_expected.Token, _got.Token(), _got,
				)
			}

			// ensure the token position matches the original lexer position
			if !coincident(_got.Position, _position) {
				t.Fatalf(
					"token position mismatch for %s; expected %s, got %s",
					_got, pos(_position), pos(_got.Position),
				)
			}

			// ensure the token position matches the expected position
			//		- since we will be testing with different line endings, we
			//		  have to choose the correct offset
			_position := gitignore.Position{
				File:   "",
				Line:   _expected.Line,
				Column: _expected.Column,
				Offset: _expected.NewLine,
			}
			if eol == "\r\n" {
				_position.Offset = _expected.CarriageReturn
			}
			if !coincident(_got.Position, _position) {
				t.Log(pos(_got.Position) + "\t" + _got.String())
				t.Fatalf(
					"token position mismatch; expected %s, got %s",
					pos(_position), pos(_got.Position),
				)
			}
		}
	}

	// ensure there are no more tokens
	_next, _err := _lexer.Next()
	if _err != nil {
		t.Errorf("unexpected error on end of token test: %s", _err.Error())
	} else if _next == nil {
		t.Errorf("unexpected nil token at end of test")
	} else if _next.Type != gitignore.EOF {
		t.Errorf(
			"token type mismatch; expected type %d, got %d [%s]",
			gitignore.EOF, _next.Type, _next,
		)
	}
} // lexer()