# 2007 June 21
#
# The author disclaims copyright to this source code. In place of
# a legal notice, here is a blessing:
#
#    May you do good and not evil.
#    May you find forgiveness for yourself and forgive others.
#    May you share freely, never taking more than you give.
#
#*************************************************************************
# This file implements regression tests for the SQLite library. The focus
# of this script is testing the pluggable tokeniser feature of the
# FTS3 module.
#
# $Id: fts3atoken.test,v 1.1 2007/08/20 17:38:42 shess Exp $
#

set testdir [file dirname $argv0]
source $testdir/tester.tcl

# If SQLITE_ENABLE_FTS3 is not defined, omit this file.
ifcapable !fts3 {
  finish_test
  return
}

set ::testprefix fts3atoken

# Return $str with every character above U+007F replaced by a \xNNNN escape.
proc escape_string {str} {
  set out ""
  foreach char [split $str ""] {
    scan $char %c i
    if {$i<=127} {
      append out $char
    } else {
      append out [format {\x%.4x} $i]
    }
  }
  set out
}

#--------------------------------------------------------------------------
# Test cases fts3atoken-1.* are the warm-body test for the SQL scalar
# function fts3_tokenizer(). The procedure is as follows:
#
#   1: Verify that there is no such fts3 tokenizer as 'blah'.
#
#   2: Query for the built-in tokenizer 'simple'. Insert a copy of the
#      retrieved value as tokenizer 'blah'.
#
#   3: Test that the value returned for tokenizer 'blah' is now the
#      same as that retrieved for 'simple'.
#
#   4: Test that it is now possible to create an fts3 table using
#      tokenizer 'blah' (it was not possible in step 1).
#
#   5: Test that the table created to use tokenizer 'blah' is usable.
#
sqlite3_db_config db SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER 1
do_test fts3atoken-1.1 {
  catchsql {
    CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah);
  }
} {1 {unknown tokenizer: blah}}
do_test fts3atoken-1.2 {
  execsql {
    SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL;
  }
} {0}
do_test fts3atoken-1.3 {
  execsql {
    SELECT fts3_tokenizer('blah') == fts3_tokenizer('simple');
  }
} {1}
do_test fts3atoken-1.4 {
  catchsql {
    CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah);
  }
} {0 {}}
do_test fts3atoken-1.5 {
  execsql {
    INSERT INTO t1(content) VALUES('There was movement at the station');
    INSERT INTO t1(content) VALUES('For the word has passed around');
    INSERT INTO t1(content) VALUES('That the colt from ol regret had got');
    SELECT content FROM t1 WHERE content MATCH 'movement'
  }
} {{There was movement at the station}}

sqlite3_db_config db SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER 0
do_catchsql_test 1.6 {
  SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL;
} {1 {fts3tokenize disabled}}

#--------------------------------------------------------------------------
# Test cases fts3atoken-2.* test error cases in the scalar function based
# API for getting and setting tokenizers.
#
do_test fts3atoken-2.1 {
  catchsql {
    SELECT fts3_tokenizer('nosuchtokenizer');
  }
} {1 {unknown tokenizer: nosuchtokenizer}}

#--------------------------------------------------------------------------
# Test cases fts3atoken-3.* test the three built-in tokenizers with a
# simple input string via the built-in test function. This is as much
# to test the test function as the tokenizer implementations.
#
do_test fts3atoken-3.1 {
  execsql {
    SELECT fts3_tokenizer_test('simple', 'I don''t see how');
  }
} {{0 i I 1 don don 2 t t 3 see see 4 how how}}
do_test fts3atoken-3.2 {
  execsql {
    SELECT fts3_tokenizer_test('porter', 'I don''t see how');
  }
} {{0 i I 1 don don 2 t t 3 see see 4 how how}}
ifcapable icu {
  do_test fts3atoken-3.3 {
    execsql {
      SELECT fts3_tokenizer_test('icu', 'I don''t see how');
    }
  } {{0 i I 1 don't don't 2 see see 3 how how}}
}
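
# Editorial addition, not part of the original test numbering: a minimal
# sketch of the same test function on a second input (the names
# fts3atoken-3.4 and fts3atoken-3.5 are hypothetical). It illustrates that
# the 'simple' tokenizer treats any non-alphanumeric character as a token
# boundary and folds case, while 'porter' additionally stems each token.
do_test fts3atoken-3.4 {
  execsql {
    SELECT fts3_tokenizer_test('simple', 'Full-Text Search');
  }
} {{0 full Full 1 text Text 2 search Search}}
do_test fts3atoken-3.5 {
  execsql {
    SELECT fts3_tokenizer_test('porter', 'running runs');
  }
} {{0 run running 1 run runs}}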

#--------------------------------------------------------------------------
# Test cases fts3atoken-4.* test the ICU tokenizer. In practice, this
# tokenizer only has two modes - "thai" and "everybody else". Some other
# Asian languages (Lao, Khmer etc.) require the same special treatment as
# Thai, but ICU doesn't support them yet.
#
ifcapable icu {

  proc do_icu_test {name locale input output} {
    set ::out [db eval { SELECT fts3_tokenizer_test('icu', $locale, $input) }]
    do_test $name {
      lindex $::out 0
    } $output
  }

  do_icu_test fts3atoken-4.1 en_US {} {}
  do_icu_test fts3atoken-4.2 en_US {Test cases fts3} [list \
    0 test Test 1 cases cases 2 fts3 fts3
  ]

  # The following test shows that ICU is smart enough to recognise
  # Thai characters, even when the locale is set to English/United
  # States.
  #
  set input "\u0e2d\u0e30\u0e44\u0e23\u0e19\u0e30\u0e04\u0e23\u0e31\u0e1a"
  set output    "0 \u0e2d\u0e30\u0e44\u0e23 \u0e2d\u0e30\u0e44\u0e23 "
  append output "1 \u0e19\u0e30 \u0e19\u0e30 "
  append output "2 \u0e04\u0e23\u0e31\u0e1a \u0e04\u0e23\u0e31\u0e1a"

  do_icu_test fts3atoken-4.3 th_TH $input $output
  do_icu_test fts3atoken-4.4 en_US $input $output

  # ICU handles an unknown locale by falling back to the default.
  # So this is not an error.
  do_icu_test fts3atoken-4.5 MiddleOfTheOcean $input $output

  set    longtoken "AReallyReallyLongTokenOneThatWillSurelyRequire"
  append longtoken "AReallocInTheIcuTokenizerCode"

  set    input "short tokens then "
  append input $longtoken
  set    output "0 short short "
  append output "1 tokens tokens "
  append output "2 then then "
  append output "3 [string tolower $longtoken] $longtoken"

  do_icu_test fts3atoken-4.6 MiddleOfTheOcean $input $output
  do_icu_test fts3atoken-4.7 th_TH $input $output
  do_icu_test fts3atoken-4.8 en_US $input $output

  do_execsql_test 5.1 {
    CREATE VIRTUAL TABLE x1 USING fts3(name,TOKENIZE icu en_US);
    insert into x1 (name) values (NULL);
    insert into x1 (name) values (NULL);
    delete from x1;
  }

  proc cp_to_str {codepoint_list} {
    set fmt [string repeat %c [llength $codepoint_list]]
    eval [list format $fmt] $codepoint_list
  }

  do_test 5.2 {
    set str [cp_to_str {19968 26085 32822 32645 27874 23433 20986}]
    execsql { INSERT INTO x1 VALUES($str) }
  } {}
}
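
# Editorial addition, not part of the original suite (the test name
# fts3atoken-4.9 is hypothetical): a small sketch, guarded the same way as
# the block above, showing that the ICU word-boundary tokenizer keeps an
# apostrophe inside a word, unlike the 'simple' tokenizer in fts3atoken-3.1.
ifcapable icu {
  do_icu_test fts3atoken-4.9 en_US {Don't Stop} {0 don't Don't 1 stop Stop}
}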

do_test fts3atoken-internal {
  execsql { SELECT fts3_tokenizer_internal_test() }
} {ok}

#-------------------------------------------------------------------------
# Test empty tokenizer names.
#
do_catchsql_test 6.1.1 {
  CREATE VIRTUAL TABLE t3 USING fts4(tokenize="");
} {1 {unknown tokenizer: }}
do_catchsql_test 6.1.2 {
  CREATE VIRTUAL TABLE t3 USING fts4(tokenize=);
} {1 {unknown tokenizer: }}
do_catchsql_test 6.1.3 {
  CREATE VIRTUAL TABLE t3 USING fts4(tokenize=" ");
} {1 {unknown tokenizer: }}

do_catchsql_test 6.2.1 {
  SELECT fts3_tokenizer(NULL);
} {1 {unknown tokenizer: }}

sqlite3_db_config db SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER 1
do_catchsql_test 6.2.2 {
  SELECT fts3_tokenizer(NULL, X'1234567812345678');
} {1 {argument type mismatch}}
do_catchsql_test 6.2.3 {
  SELECT fts3_tokenizer(NULL, X'12345678');
} {1 {argument type mismatch}}

finish_test