github.com/klaytn/klaytn@v1.12.1/blockchain/asm/lex_test.go

// Modifications Copyright 2018 The klaytn Authors
// Copyright 2017 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
//
// This file is derived from core/asm/lex_test.go (2018/06/04).
// Modified and improved for the klaytn development.

package asm

import (
	"reflect"
	"testing"
)

// lexAll runs the lexer over src and collects every token emitted on the
// result channel, including the trailing eof token.
func lexAll(src string) []token {
	ch := Lex([]byte(src), false)

	var tokens []token
	for i := range ch {
		tokens = append(tokens, i)
	}
	return tokens
}

func TestLexer(t *testing.T) {
	tests := []struct {
		input  string
		tokens []token
	}{
		{
			input:  ";; this is a comment",
			tokens: []token{{typ: lineStart}, {typ: eof}},
		},
		{
			input:  "0x12345678",
			tokens: []token{{typ: lineStart}, {typ: number, text: "0x12345678"}, {typ: eof}},
		},
		{
			input:  "0x123ggg",
			tokens: []token{{typ: lineStart}, {typ: number, text: "0x123"}, {typ: element, text: "ggg"}, {typ: eof}},
		},
		{
			input:  "12345678",
			tokens: []token{{typ: lineStart}, {typ: number, text: "12345678"}, {typ: eof}},
		},
		{
			input:  "123abc",
			tokens: []token{{typ: lineStart}, {typ: number, text: "123"}, {typ: element, text: "abc"}, {typ: eof}},
		},
		{
			input:  "0123abc",
			tokens: []token{{typ: lineStart}, {typ: number, text: "0123"}, {typ: element, text: "abc"}, {typ: eof}},
		},
	}

	for _, test := range tests {
		tokens := lexAll(test.input)
		if !reflect.DeepEqual(tokens, test.tokens) {
			t.Errorf("input %q\ngot: %+v\nwant: %+v", test.input, tokens, test.tokens)
		}
	}
}