github.com/aigarnetwork/aigar@v0.0.0-20191115204914-d59a6eb70f8e/core/asm/lex_test.go

//  Copyright 2018 The go-ethereum Authors
//  Copyright 2019 The go-aigar Authors
//  This file is part of the go-aigar library.
//
//  The go-aigar library is free software: you can redistribute it and/or modify
//  it under the terms of the GNU Lesser General Public License as published by
//  the Free Software Foundation, either version 3 of the License, or
//  (at your option) any later version.
//
//  The go-aigar library is distributed in the hope that it will be useful,
//  but WITHOUT ANY WARRANTY; without even the implied warranty of
//  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//  GNU Lesser General Public License for more details.
//
//  You should have received a copy of the GNU Lesser General Public License
//  along with the go-aigar library. If not, see <http://www.gnu.org/licenses/>.

package asm

import (
	"reflect"
	"testing"
)

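// lexAll runs the lexer on src and collects every token it emits until the
// token channel is closed.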
func lexAll(src string) []token {
	ch := Lex([]byte(src), false)

	// Collect every token until the lexer closes the channel.
	var tokens []token
	for tok := range ch {
		tokens = append(tokens, tok)
	}
	return tokens
}

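// TestLexer feeds a set of inputs through the lexer and compares the emitted
// token stream against the expected sequence for each case.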
func TestLexer(t *testing.T) {
	tests := []struct {
		input  string
		tokens []token
	}{
		{
			input:  ";; this is a comment",
			tokens: []token{{typ: lineStart}, {typ: eof}},
		},
		{
			input:  "0x12345678",
			tokens: []token{{typ: lineStart}, {typ: number, text: "0x12345678"}, {typ: eof}},
		},
		{
			input:  "0x123ggg",
			tokens: []token{{typ: lineStart}, {typ: number, text: "0x123"}, {typ: element, text: "ggg"}, {typ: eof}},
		},
		{
			input:  "12345678",
			tokens: []token{{typ: lineStart}, {typ: number, text: "12345678"}, {typ: eof}},
		},
		{
			input:  "123abc",
			tokens: []token{{typ: lineStart}, {typ: number, text: "123"}, {typ: element, text: "abc"}, {typ: eof}},
		},
		{
			input:  "0123abc",
			tokens: []token{{typ: lineStart}, {typ: number, text: "0123"}, {typ: element, text: "abc"}, {typ: eof}},
		},
	}

	for _, test := range tests {
		tokens := lexAll(test.input)
		if !reflect.DeepEqual(tokens, test.tokens) {
			t.Errorf("input %q\ngot:  %+v\nwant: %+v", test.input, tokens, test.tokens)
		}
	}
}