github.com/lbryio/lbcd@v0.22.119/txscript/tokenizer_test.go

// Copyright (c) 2019 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.

package txscript

import (
	"bytes"
	"fmt"
	"testing"
)

// TestScriptTokenizer ensures a wide variety of behavior provided by the script
// tokenizer performs as expected.
func TestScriptTokenizer(t *testing.T) {
	t.Skip()

	type expectedResult struct {
		op    byte   // expected parsed opcode
		data  []byte // expected parsed data
		index int32  // expected index into raw script after parsing token
	}

	type tokenizerTest struct {
		name     string           // test description
		script   []byte           // the script to tokenize
		expected []expectedResult // the expected info after parsing each token
		finalIdx int32            // the expected final byte index
		err      error            // expected error
	}

	// Add both positive and negative tests for OP_DATA_1 through OP_DATA_75.
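	// For OP_DATA_1 (0x01) through OP_DATA_75 (0x4b) the opcode value is also
	// the number of bytes pushed, so the loop variable below doubles as the
	// expected push length.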
	const numTestsHint = 100 // Make prealloc linter happy.
	tests := make([]tokenizerTest, 0, numTestsHint)
	for op := byte(OP_DATA_1); op <= OP_DATA_75; op++ {
		data := bytes.Repeat([]byte{0x01}, int(op))
		tests = append(tests, tokenizerTest{
			name:     fmt.Sprintf("OP_DATA_%d", op),
			script:   append([]byte{op}, data...),
			expected: []expectedResult{{op, data, 1 + int32(op)}},
			finalIdx: 1 + int32(op),
			err:      nil,
		})

		// Create test that provides one less byte than the data push requires.
		tests = append(tests, tokenizerTest{
			name:     fmt.Sprintf("short OP_DATA_%d", op),
			script:   append([]byte{op}, data[1:]...),
			expected: nil,
			finalIdx: 0,
			err:      scriptError(ErrMalformedPush, ""),
		})
	}

	// Add both positive and negative tests for OP_PUSHDATA{1,2,4}.
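	// OP_PUSHDATA1, OP_PUSHDATA2, and OP_PUSHDATA4 encode the push length in
	// 1, 2, and 4 little-endian bytes respectively, so each expected index
	// below is the opcode byte plus the length prefix plus the 76 data bytes.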
	data := mustParseShortForm("0x01{76}")
	tests = append(tests, []tokenizerTest{{
		name:     "OP_PUSHDATA1",
		script:   mustParseShortForm("OP_PUSHDATA1 0x4c 0x01{76}"),
		expected: []expectedResult{{OP_PUSHDATA1, data, 2 + int32(len(data))}},
		finalIdx: 2 + int32(len(data)),
		err:      nil,
	}, {
		name:     "OP_PUSHDATA1 no data length",
		script:   mustParseShortForm("OP_PUSHDATA1"),
		expected: nil,
		finalIdx: 0,
		err:      scriptError(ErrMalformedPush, ""),
	}, {
		name:     "OP_PUSHDATA1 short data by 1 byte",
		script:   mustParseShortForm("OP_PUSHDATA1 0x4c 0x01{75}"),
		expected: nil,
		finalIdx: 0,
		err:      scriptError(ErrMalformedPush, ""),
	}, {
		name:     "OP_PUSHDATA2",
		script:   mustParseShortForm("OP_PUSHDATA2 0x4c00 0x01{76}"),
		expected: []expectedResult{{OP_PUSHDATA2, data, 3 + int32(len(data))}},
		finalIdx: 3 + int32(len(data)),
		err:      nil,
	}, {
		name:     "OP_PUSHDATA2 no data length",
		script:   mustParseShortForm("OP_PUSHDATA2"),
		expected: nil,
		finalIdx: 0,
		err:      scriptError(ErrMalformedPush, ""),
	}, {
		name:     "OP_PUSHDATA2 short data by 1 byte",
		script:   mustParseShortForm("OP_PUSHDATA2 0x4c00 0x01{75}"),
		expected: nil,
		finalIdx: 0,
		err:      scriptError(ErrMalformedPush, ""),
	}, {
		name:     "OP_PUSHDATA4",
		script:   mustParseShortForm("OP_PUSHDATA4 0x4c000000 0x01{76}"),
		expected: []expectedResult{{OP_PUSHDATA4, data, 5 + int32(len(data))}},
		finalIdx: 5 + int32(len(data)),
		err:      nil,
	}, {
		name:     "OP_PUSHDATA4 no data length",
		script:   mustParseShortForm("OP_PUSHDATA4"),
		expected: nil,
		finalIdx: 0,
		err:      scriptError(ErrMalformedPush, ""),
	}, {
		name:     "OP_PUSHDATA4 short data by 1 byte",
		script:   mustParseShortForm("OP_PUSHDATA4 0x4c000000 0x01{75}"),
		expected: nil,
		finalIdx: 0,
		err:      scriptError(ErrMalformedPush, ""),
	}}...)

	// Add tests for OP_0, and OP_1 through OP_16 (small integers/true/false).
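	// These opcodes push no data of their own, so each is expected to produce
	// nil data and advance the byte index by exactly one.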
	opcodes := []byte{OP_0}
	for op := byte(OP_1); op <= OP_16; op++ {
		opcodes = append(opcodes, op)
	}
	for _, op := range opcodes {
		tests = append(tests, tokenizerTest{
			name:     fmt.Sprintf("OP_%d", op),
			script:   []byte{op},
			expected: []expectedResult{{op, nil, 1}},
			finalIdx: 1,
			err:      nil,
		})
	}

	// Add various positive and negative tests for multi-opcode scripts.
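	// The "overlapped data" cases intentionally supply one byte fewer than the
	// declared push length so the push consumes the byte of the following
	// opcode; the tokenizer is expected to report that byte as part of the
	// pushed data rather than as a separate token.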
	tests = append(tests, []tokenizerTest{{
		name:   "pay-to-pubkey-hash",
		script: mustParseShortForm("DUP HASH160 DATA_20 0x01{20} EQUAL CHECKSIG"),
		expected: []expectedResult{
			{OP_DUP, nil, 1}, {OP_HASH160, nil, 2},
			{OP_DATA_20, mustParseShortForm("0x01{20}"), 23},
			{OP_EQUAL, nil, 24}, {OP_CHECKSIG, nil, 25},
		},
		finalIdx: 25,
		err:      nil,
	}, {
		name:   "almost pay-to-pubkey-hash (short data)",
		script: mustParseShortForm("DUP HASH160 DATA_20 0x01{17} EQUAL CHECKSIG"),
		expected: []expectedResult{
			{OP_DUP, nil, 1}, {OP_HASH160, nil, 2},
		},
		finalIdx: 2,
		err:      scriptError(ErrMalformedPush, ""),
	}, {
		name:   "almost pay-to-pubkey-hash (overlapped data)",
		script: mustParseShortForm("DUP HASH160 DATA_20 0x01{19} EQUAL CHECKSIG"),
		expected: []expectedResult{
			{OP_DUP, nil, 1}, {OP_HASH160, nil, 2},
			{OP_DATA_20, mustParseShortForm("0x01{19} EQUAL"), 23},
			{OP_CHECKSIG, nil, 24},
		},
		finalIdx: 24,
		err:      nil,
	}, {
		name:   "pay-to-script-hash",
		script: mustParseShortForm("HASH160 DATA_20 0x01{20} EQUAL"),
		expected: []expectedResult{
			{OP_HASH160, nil, 1},
			{OP_DATA_20, mustParseShortForm("0x01{20}"), 22},
			{OP_EQUAL, nil, 23},
		},
		finalIdx: 23,
		err:      nil,
	}, {
		name:   "almost pay-to-script-hash (short data)",
		script: mustParseShortForm("HASH160 DATA_20 0x01{18} EQUAL"),
		expected: []expectedResult{
			{OP_HASH160, nil, 1},
		},
		finalIdx: 1,
		err:      scriptError(ErrMalformedPush, ""),
	}, {
		name:   "almost pay-to-script-hash (overlapped data)",
		script: mustParseShortForm("HASH160 DATA_20 0x01{19} EQUAL"),
		expected: []expectedResult{
			{OP_HASH160, nil, 1},
			{OP_DATA_20, mustParseShortForm("0x01{19} EQUAL"), 22},
		},
		finalIdx: 22,
		err:      nil,
	}}...)

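	// All of the tests above use script version 0; rejection of an unsupported
	// version is exercised separately by TestScriptTokenizerUnsupportedVersion.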
	const scriptVersion = 0
	for _, test := range tests {
		tokenizer := MakeScriptTokenizer(scriptVersion, test.script)
		var opcodeNum int
		for tokenizer.Next() {
			// Ensure Next never returns true when there is an error set.
			if err := tokenizer.Err(); err != nil {
				t.Fatalf("%q: Next returned true when tokenizer has err: %v",
					test.name, err)
			}

			// Ensure the test data expects a token to be parsed.
			op := tokenizer.Opcode()
			data := tokenizer.Data()
			if opcodeNum >= len(test.expected) {
				t.Fatalf("%q: unexpected token '%d' (data: '%x')", test.name,
					op, data)
			}
			expected := &test.expected[opcodeNum]

			// Ensure the opcode and data are the expected values.
			if op != expected.op {
				t.Fatalf("%q: unexpected opcode -- got %v, want %v", test.name,
					op, expected.op)
			}
			if !bytes.Equal(data, expected.data) {
				t.Fatalf("%q: unexpected data -- got %x, want %x", test.name,
					data, expected.data)
			}

			tokenizerIdx := tokenizer.ByteIndex()
			if tokenizerIdx != expected.index {
				t.Fatalf("%q: unexpected byte index -- got %d, want %d",
					test.name, tokenizerIdx, expected.index)
			}

			opcodeNum++
		}

		// Ensure the tokenizer claims it is done.  This should be the case
		// regardless of whether or not there was a parse error.
		if !tokenizer.Done() {
			t.Fatalf("%q: tokenizer claims it is not done", test.name)
		}

		// Ensure the error is as expected.
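		// A nil expected error means the tokenizer must not report one;
		// otherwise only the error code is compared since the message text is
		// not significant to the test.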
		if test.err == nil && tokenizer.Err() != nil {
			t.Fatalf("%q: unexpected tokenizer err -- got %v, want nil",
				test.name, tokenizer.Err())
		} else if test.err != nil {
			if !IsErrorCode(tokenizer.Err(), test.err.(Error).ErrorCode) {
				t.Fatalf("%q: unexpected tokenizer err -- got %v, want %v",
					test.name, tokenizer.Err(), test.err.(Error).ErrorCode)
			}
		}

		// Ensure the final index is the expected value.
		tokenizerIdx := tokenizer.ByteIndex()
		if tokenizerIdx != test.finalIdx {
			t.Fatalf("%q: unexpected final byte index -- got %d, want %d",
				test.name, tokenizerIdx, test.finalIdx)
		}
	}
}

// TestScriptTokenizerUnsupportedVersion ensures the tokenizer fails immediately
// with an unsupported script version.
func TestScriptTokenizerUnsupportedVersion(t *testing.T) {
	const scriptVersion = 65535
	tokenizer := MakeScriptTokenizer(scriptVersion, nil)
	if !IsErrorCode(tokenizer.Err(), ErrUnsupportedScriptVersion) {
		t.Fatalf("script tokenizer did not error with unsupported version")
	}
}
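
// The function below is an illustrative sketch, not part of the original
// upstream tests: it demonstrates the iteration pattern exercised above using
// only the tokenizer API already referenced in this file (MakeScriptTokenizer,
// Next, Opcode, Data, and Err).  The example name and script are arbitrary.
func ExampleScriptTokenizer_p2sh() {
	// Tokenize a pay-to-script-hash script: HASH160 <20-byte hash> EQUAL.
	script := mustParseShortForm("HASH160 DATA_20 0x01{20} EQUAL")
	const scriptVersion = 0
	tokenizer := MakeScriptTokenizer(scriptVersion, script)
	for tokenizer.Next() {
		fmt.Printf("opcode 0x%02x pushes %d bytes\n", tokenizer.Opcode(),
			len(tokenizer.Data()))
	}
	if err := tokenizer.Err(); err != nil {
		fmt.Println("tokenizer error:", err)
	}

	// Output:
	// opcode 0xa9 pushes 0 bytes
	// opcode 0x14 pushes 20 bytes
	// opcode 0x87 pushes 0 bytes
}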