github.com/lambdatest/go-gitignore@v0.0.0-20230214141342-7fe15342e580/lexer_test.go

package gitignore_test

import (
	"fmt"
	"strings"
	"testing"

	"github.com/denormal/go-gitignore"
)

// TestLexerNewLine tests the behaviour of the gitignore.Lexer when the input
// data explicitly uses "\n" as the line separator
func TestLexerNewLine(t *testing.T) {
	// split the test content into lines
	//		- ensure we handle "\n" and "\r" correctly
	//		- since this test file is written on a system that uses "\n"
	//		  to designate end of line, this should be unnecessary, but it's
	//		  possible for the file line endings to be converted outside of
	//		  this repository, so we are thorough here to ensure the test
	//		  works as expected everywhere
	_content := strings.Split(_GITIGNORE, "\n")
	for _i := 0; _i < len(_content); _i++ {
		_content[_i] = strings.TrimSuffix(_content[_i], "\r")
	}

	// perform the Lexer test with input explicitly separated by "\n"
	lexer(t, _content, "\n", _GITTOKENS, nil)
} // TestLexerNewLine()

// TestLexerCarriageReturn tests the behaviour of the gitignore.Lexer when the
// input data explicitly uses "\r\n" as the line separator
func TestLexerCarriageReturn(t *testing.T) {
	// split the test content into lines
	//		- see above
	_content := strings.Split(_GITIGNORE, "\n")
	for _i := 0; _i < len(_content); _i++ {
		_content[_i] = strings.TrimSuffix(_content[_i], "\r")
	}

	// perform the Lexer test with input explicitly separated by "\r\n"
	lexer(t, _content, "\r\n", _GITTOKENS, nil)
} // TestLexerCarriageReturn()

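// TestLexerInvalidNewLine tests the behaviour of the gitignore.Lexer against
// invalid input (_GITINVALID) when "\n" is used as the line separator,
// expecting lexing to report gitignore.CarriageReturnError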
func TestLexerInvalidNewLine(t *testing.T) {
	// perform the Lexer test with invalid input separated by "\n"
	//		- the source content is manually constructed with "\n" as EOL
	_content := strings.Split(_GITINVALID, "\n")
	lexer(t, _content, "\n", _TOKENSINVALID, gitignore.CarriageReturnError)
} // TestLexerInvalidNewLine()

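// TestLexerInvalidCarriageReturn tests the behaviour of the gitignore.Lexer
// against invalid input (_GITINVALID) when "\r\n" is used as the line
// separator, expecting lexing to report gitignore.CarriageReturnError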
func TestLexerInvalidCarriageReturn(t *testing.T) {
	// perform the Lexer test with invalid input separated by "\r\n"
	//		- the source content is manually constructed with "\n" as EOL
	_content := strings.Split(_GITINVALID, "\n")
	lexer(t, _content, "\r\n", _TOKENSINVALID, gitignore.CarriageReturnError)
} // TestLexerInvalidCarriageReturn()

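// lexer is the common test driver: it joins lines with the given eol
// sequence, feeds the result to gitignore.NewLexer, and compares the emitted
// token stream against the expected tokens, tolerating the error e (if any)
// during lexing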
func lexer(t *testing.T, lines []string, eol string, tokens []token, e error) {
	// create a temporary .gitignore
	_buffer, _err := buffer(strings.Join(lines, eol))
	if _err != nil {
		t.Fatalf("unable to create temporary .gitignore: %s", _err.Error())
	}

	// ensure we have a non-nil Lexer instance
	//		- a nil Lexer would panic on the calls below, so fail immediately
	_lexer := gitignore.NewLexer(_buffer)
	if _lexer == nil {
		t.Fatal("expected non-nil Lexer instance; nil found")
	}

	// ensure the stream of tokens is as we expect
	for _, _expected := range tokens {
		_position := _lexer.Position()

		// ensure the string form of the Lexer reports the correct position
		_string := fmt.Sprintf("%d:%d", _position.Line, _position.Column)
		if _lexer.String() != _string {
			t.Errorf(
				"lexer string mismatch; expected %q, got %q",
				_string, _lexer.String(),
			)
		}

		// extract the next token from the lexer
		_got, _err := _lexer.Next()

		// a lexing error is fatal unless it is the error we expect for this
		// test, in which case it is tolerated and processing continues
		if _err != nil {
			if _err.Underlying() != e {
				t.Fatalf(
					"unable to retrieve expected token; %s at %s",
					_err.Error(), pos(_err.Position()),
				)
			}
		}

		// did we receive a token?
		if _got == nil {
			t.Fatalf("expected token at %s; none found", _lexer)
		} else if _got.Type != _expected.Type {
			t.Fatalf(
				"token type mismatch; expected type %d, got %d [%s]",
				_expected.Type, _got.Type, _got,
			)
		} else if _got.Name() != _expected.Name {
			t.Fatalf(
				"token name mismatch; expected name %q, got %q [%s]",
				_expected.Name, _got.Name(), _got,
			)
		} else {
			// ensure the extracted token string matches expectation
			//		- we handle EOL separately, since it can change based
			//		  on the end of line sequence of the input file
			_same := _got.Token() == _expected.Token
			if _got.Type == gitignore.EOL {
				_same = _got.Token() == eol
			}
			if !_same {
				t.Fatalf(
					"token value mismatch; expected token %q, got %q [%s]",
					_expected.Token, _got.Token(), _got,
				)
			}

			// ensure the token position matches the original lexer position
			if !coincident(_got.Position, _position) {
				t.Fatalf(
					"token position mismatch for %s; expected %s, got %s",
					_got, pos(_position), pos(_got.Position),
				)
			}

			// ensure the token position matches the expected position
			//		- since we will be testing with different line endings, we
			//		  have to choose the correct offset: "\r\n" is one byte
			//		  longer than "\n", so byte offsets differ between the two
			//		  forms of input, and each expected token carries both a
			//		  NewLine and a CarriageReturn offset
			//		- note that this _position deliberately shadows the lexer
			//		  position captured at the top of the loop
			_position := gitignore.Position{
				File:   "",
				Line:   _expected.Line,
				Column: _expected.Column,
				Offset: _expected.NewLine,
			}
			if eol == "\r\n" {
				_position.Offset = _expected.CarriageReturn
			}
			if !coincident(_got.Position, _position) {
				t.Log(pos(_got.Position) + "\t" + _got.String())
				t.Fatalf(
					"token position mismatch; expected %s, got %s",
					pos(_position), pos(_got.Position),
				)
			}
		}
	}

	// ensure there are no more tokens, i.e. the next token is EOF
	_next, _err := _lexer.Next()
	if _err != nil {
		t.Errorf("unexpected error on end of token test: %s", _err.Error())
	} else if _next == nil {
		t.Errorf("unexpected nil token at end of test")
	} else if _next.Type != gitignore.EOF {
		t.Errorf(
			"token type mismatch; expected type %d, got %d [%s]",
			gitignore.EOF, _next.Type, _next,
		)
	}
} // lexer()
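
// The sketch below is illustrative only and is not part of the original test
// suite: it shows how the Lexer API exercised above is typically driven by a
// caller. It assumes only what the tests themselves rely on, namely that
// gitignore.NewLexer accepts an io.Reader, that Next() yields tokens (and a
// possible error) one at a time, and that the stream ends with a token of
// type gitignore.EOF.
func dumpTokens(content string) ([]string, error) {
	_lexer := gitignore.NewLexer(strings.NewReader(content))

	// drain the lexer, recording each token as "line:column token"
	_stream := []string{}
	for {
		_token, _err := _lexer.Next()
		if _err != nil {
			return _stream, _err
		}
		_stream = append(_stream, fmt.Sprintf(
			"%d:%d %s",
			_token.Position.Line, _token.Position.Column, _token,
		))

		// stop once the end-of-file token has been seen
		if _token.Type == gitignore.EOF {
			return _stream, nil
		}
	}
} // dumpTokens()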