github.com/ianlewis/go-gitignore@v0.1.1-0.20231110021210-4a0f15cbd56f/lexer_test.go

// Copyright 2016 Denormal Limited
// Copyright 2023 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package gitignore_test

import (
	"fmt"
	"strings"
	"testing"

	"github.com/ianlewis/go-gitignore"
)

// TestLexerNewLine tests the behaviour of the gitignore.Lexer when the input
// data explicitly uses "\n" as the line separator
func TestLexerNewLine(t *testing.T) {
	// split the test content into lines
	//		- ensure we handle "\n" and "\r" correctly
	//		- since this test file is written on a system that uses "\n"
	//		  to designate end of line, this should be unnecessary, but it's
	//		  possible for the file line endings to be converted outside of
	//		  this repository, so we are thorough here to ensure the test
	//		  works as expected everywhere
	_content := strings.Split(_GITIGNORE, "\n")
	for _i := 0; _i < len(_content); _i++ {
		_content[_i] = strings.TrimSuffix(_content[_i], "\r")
	}

	// perform the Lexer test with input explicitly separated by "\n"
	lexer(t, _content, "\n", _GITTOKENS, nil)
} // TestLexerNewLine()

// TestLexerCarriageReturn tests the behaviour of the gitignore.Lexer when the
// input data explicitly uses "\r\n" as the line separator
func TestLexerCarriageReturn(t *testing.T) {
	// split the test content into lines
	//		- see above
	_content := strings.Split(_GITIGNORE, "\n")
	for _i := 0; _i < len(_content); _i++ {
		_content[_i] = strings.TrimSuffix(_content[_i], "\r")
	}

	// perform the Lexer test with input explicitly separated by "\r\n"
	lexer(t, _content, "\r\n", _GITTOKENS, nil)
} // TestLexerCarriageReturn()

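// TestLexerInvalidNewLine tests the behaviour of the gitignore.Lexer against
// invalid input data when "\n" is used as the line separator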
func TestLexerInvalidNewLine(t *testing.T) {
	// perform the Lexer test with invalid input separated by "\n"
	//		- the source content is manually constructed with "\n" as EOL
	_content := strings.Split(_GITINVALID, "\n")
	lexer(t, _content, "\n", _TOKENSINVALID, gitignore.CarriageReturnError)
} // TestLexerInvalidNewLine()

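// TestLexerInvalidCarriageReturn tests the behaviour of the gitignore.Lexer
// against invalid input data when "\r\n" is used as the line separator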
func TestLexerInvalidCarriageReturn(t *testing.T) {
	// perform the Lexer test with invalid input separated by "\r\n"
	//		- the source content is manually constructed with "\n" as EOL
	_content := strings.Split(_GITINVALID, "\n")
	lexer(t, _content, "\r\n", _TOKENSINVALID, gitignore.CarriageReturnError)
} // TestLexerInvalidCarriageReturn()

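// lexer is the common test driver: it joins the given lines with the eol
// sequence, lexes the result with gitignore.NewLexer, and compares the
// resulting token stream (and any error) against the expected tokens and
// the expected error e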
func lexer(t *testing.T, lines []string, eol string, tokens []token, e error) {
	// create a temporary .gitignore
	_buffer, _err := buffer(strings.Join(lines, eol))
	if _err != nil {
		t.Fatalf("unable to create temporary .gitignore: %s", _err.Error())
	}

	// ensure we have a non-nil Lexer instance
	_lexer := gitignore.NewLexer(_buffer)
	if _lexer == nil {
		t.Error("expected non-nil Lexer instance; nil found")
	}

	// ensure the stream of tokens is as we expect
	for _, _expected := range tokens {
		_position := _lexer.Position()

		// ensure the string form of the Lexer reports the correct position
		_string := fmt.Sprintf("%d:%d", _position.Line, _position.Column)
		if _lexer.String() != _string {
			t.Errorf(
				"lexer string mismatch; expected %q, got %q",
				_string, _lexer.String(),
			)
		}

		// extract the next token from the lexer
		_got, _err := _lexer.Next()

		// ensure any error we receive is the error we expect
		if _err != nil {
			// if we expect an error during processing, check to see if
			// the received error is as expected
			if _err.Underlying() != e {
				t.Fatalf(
					"unable to retrieve expected token; %s at %s",
					_err.Error(), pos(_err.Position()),
				)
			}
		}

		// did we receive a token?
		if _got == nil {
			t.Fatalf("expected token at %s; none found", _lexer)
		}

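		// ensure the token type and name match expectations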
		if _got.Type != _expected.Type {
			t.Fatalf(
				"token type mismatch; expected type %d, got %d [%s]",
				_expected.Type, _got.Type, _got,
			)
		}

		if _got.Name() != _expected.Name {
			t.Fatalf(
				"token name mismatch; expected name %q, got %q [%s]",
				_expected.Name, _got.Name(), _got,
			)
		}

		// ensure the extracted token string matches expectation
		//		- we handle EOL separately, since it can change based
		//		  on the end of line sequence of the input file
		_same := _got.Token() == _expected.Token
		if _got.Type == gitignore.EOL {
			_same = _got.Token() == eol
		}
		if !_same {
			t.Fatalf(
				"token value mismatch; expected token %q, got %q [%s]",
				_expected.Token, _got.Token(), _got,
			)
		}

		// ensure the token position matches the original lexer position
		if !coincident(_got.Position, _position) {
			t.Fatalf(
				"token position mismatch for %s; expected %s, got %s",
				_got, pos(_position), pos(_got.Position),
			)
		}

		// ensure the token position matches the expected position
		//		- since we will be testing with different line endings, we
		//		  have to choose the correct offset
		_ignorePosition := gitignore.Position{
			File:   "",
			Line:   _expected.Line,
			Column: _expected.Column,
			Offset: _expected.NewLine,
		}
		if eol == "\r\n" {
			_ignorePosition.Offset = _expected.CarriageReturn
		}
		if !coincident(_got.Position, _ignorePosition) {
			t.Log(pos(_got.Position) + "\t" + _got.String())
			t.Fatalf(
				"token position mismatch; expected %s, got %s",
				pos(_ignorePosition), pos(_got.Position),
			)
		}
	}

	// ensure there are no more tokens
	_next, _err := _lexer.Next()
	if _err != nil {
		t.Errorf("unexpected error on end of token test: %s", _err.Error())
	} else if _next == nil {
		t.Errorf("unexpected nil token at end of test")
	} else if _next.Type != gitignore.EOF {
		t.Errorf(
			"token type mismatch; expected type %d, got %d [%s]",
			gitignore.EOF, _next.Type, _next,
		)
	}
} // lexer()