# The author disclaims copyright to this source code. In place of
# a legal notice, here is a blessing:
#
#    May you do good and not evil.
#    May you find forgiveness for yourself and forgive others.
#    May you share freely, never taking more than you give.
#
#*************************************************************************
# This file implements regression tests for SQLite library. The focus
# of this script is testing the pluggable tokeniser feature of the
# FTS3 module.
#
# $Id: fts3atoken.test,v 1.1 2007/08/20 17:38:42 shess Exp $
#
set testdir [file dirname $argv0]
source $testdir/tester.tcl
# If SQLITE_ENABLE_FTS3 is not defined, omit this file.
ifcapable !fts3 {
  finish_test
  return
}

set ::testprefix fts3atoken
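
# Return a copy of $str in which characters outside the ASCII range are
# replaced by \xNNNN escape sequences.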
proc escape_string {str} {
  set out ""
  foreach char [split $str ""] {
    scan $char %c i
    if {$i<=127} {append out $char} else {append out [format {\x%.4x} $i]}
  }
  set out
}
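
# Illustrative check, not part of the original script: ASCII characters pass
# through escape_string unchanged, while other codepoints (here U+00E9) are
# rendered as literal \xNNNN escapes. The test name is an addition here.
do_test fts3atoken-escape-demo {
  escape_string "caf\u00e9"
} {caf\x00e9}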

#--------------------------------------------------------------------------
# Test cases fts3atoken-1.* are the warm-body test for the SQL scalar
# function fts3_tokenizer(). The procedure is as follows:
#
#   1: Verify that there is no such fts3 tokenizer as 'blah'.
#
#   2: Query for the built-in tokenizer 'simple'. Insert a copy of the
#      retrieved value as tokenizer 'blah'.
#
#   3: Test that the value returned for tokenizer 'blah' is now the
#      same as that retrieved for 'simple'.
#
#   4: Test that it is now possible to create an fts3 table using
#      tokenizer 'blah' (it was not possible in step 1).
#
#   5: Test that the table created to use tokenizer 'blah' is usable.
#
sqlite3_db_config db SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER 1
do_test fts3atoken-1.1 {
  catchsql {
    CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah);
  }
} {1 {unknown tokenizer: blah}}
do_test fts3atoken-1.2 {
  execsql {
    SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL;
  }
} {0}
do_test fts3atoken-1.3 {
  execsql {
    SELECT fts3_tokenizer('blah') == fts3_tokenizer('simple');
  }
} {1}
do_test fts3atoken-1.4 {
  catchsql {
    CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah);
  }
} {0 {}}
do_test fts3atoken-1.5 {
  execsql {
    INSERT INTO t1(content) VALUES('There was movement at the station');
    INSERT INTO t1(content) VALUES('For the word has passed around');
    INSERT INTO t1(content) VALUES('That the colt from ol regret had got');
    SELECT content FROM t1 WHERE content MATCH 'movement'
  }
} {{There was movement at the station}}

sqlite3_db_config db SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER 0
do_catchsql_test 1.6 {
  SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL;
} {1 {fts3tokenize disabled}}

#--------------------------------------------------------------------------
# Test cases fts3atoken-2.* test error cases in the scalar function based
# API for getting and setting tokenizers.
#
do_test fts3atoken-2.1 {
  catchsql {
    SELECT fts3_tokenizer('nosuchtokenizer');
  }
} {1 {unknown tokenizer: nosuchtokenizer}}

#--------------------------------------------------------------------------
# Test cases fts3atoken-3.* test the three built-in tokenizers with a
# simple input string via the built-in test function. This is as much
# to test the test function as the tokenizer implementations.
#
do_test fts3atoken-3.1 {
  execsql {
    SELECT fts3_tokenizer_test('simple', 'I don''t see how');
  }
} {{0 i I 1 don don 2 t t 3 see see 4 how how}}
do_test fts3atoken-3.2 {
  execsql {
    SELECT fts3_tokenizer_test('porter', 'I don''t see how');
  }
} {{0 i I 1 don don 2 t t 3 see see 4 how how}}
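
# Illustrative sketch, not part of the original script: the porter tokenizer
# also stems inflected forms. Assuming standard Porter stemming, both
# "running" and "runs" reduce to the token "run".
do_test fts3atoken-3.2.1 {
  execsql {
    SELECT fts3_tokenizer_test('porter', 'running runs');
  }
} {{0 run running 1 run runs}}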
do_test fts3atoken-3.3 {
  execsql {
    SELECT fts3_tokenizer_test('icu', 'I don''t see how');
  }
} {{0 i I 1 don't don't 2 see see 3 how how}}

#--------------------------------------------------------------------------
# Test cases fts3atoken-4.* test the ICU tokenizer. In practice, this
# tokenizer only has two modes - "thai" and "everybody else". Some other
# Asian languages (Lao, Khmer etc.) require the same special treatment as
# Thai, but ICU doesn't support them yet.
#
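# Run fts3_tokenizer_test() with the ICU tokenizer for the given locale and
# input string, and compare the tokenizer output against $output.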
proc do_icu_test {name locale input output} {
  set ::out [db eval { SELECT fts3_tokenizer_test('icu', $locale, $input) }]
  do_test $name {lindex $::out 0} $output
}
do_icu_test fts3atoken-4.1 en_US {} {}
do_icu_test fts3atoken-4.2 en_US {Test cases fts3} [list \
  0 test Test 1 cases cases 2 fts3 fts3
]

# The following test shows that ICU is smart enough to recognise
# Thai characters, even when the locale is set to English/United
# States.
#
set input "\u0e2d\u0e30\u0e44\u0e23\u0e19\u0e30\u0e04\u0e23\u0e31\u0e1a"
set output    "0 \u0e2d\u0e30\u0e44\u0e23 \u0e2d\u0e30\u0e44\u0e23 "
append output "1 \u0e19\u0e30 \u0e19\u0e30 "
append output "2 \u0e04\u0e23\u0e31\u0e1a \u0e04\u0e23\u0e31\u0e1a"

do_icu_test fts3atoken-4.3 th_TH $input $output
do_icu_test fts3atoken-4.4 en_US $input $output

# ICU handles an unknown locale by falling back to the default.
# So this is not an error.
do_icu_test fts3atoken-4.5 MiddleOfTheOcean $input $output

set    longtoken "AReallyReallyLongTokenOneThatWillSurelyRequire"
append longtoken "AReallocInTheIcuTokenizerCode"

set    input "short tokens then "
append input $longtoken
set    output "0 short short "
append output "1 tokens tokens "
append output "2 then then "
append output "3 [string tolower $longtoken] $longtoken"

do_icu_test fts3atoken-4.6 MiddleOfTheOcean $input $output
do_icu_test fts3atoken-4.7 th_TH $input $output
do_icu_test fts3atoken-4.8 en_US $input $output

do_execsql_test 5.1 {
  CREATE VIRTUAL TABLE x1 USING fts3(name,TOKENIZE icu en_US);
  insert into x1 (name) values (NULL);
  insert into x1 (name) values (NULL);
}
proc cp_to_str {codepoint_list} {
  set fmt [string repeat %c [llength $codepoint_list]]
  eval [list format $fmt] $codepoint_list
}
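
# Illustrative usage, not part of the original script: cp_to_str converts a
# list of Unicode codepoints into the corresponding string, e.g. codepoints
# 72 and 105 yield "Hi".
do_test fts3atoken-cp-to-str-demo {
  cp_to_str {72 105}
} {Hi}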
do_test 5.2 {
  set str [cp_to_str {19968 26085 32822 32645 27874 23433 20986}]
  execsql { INSERT INTO x1 VALUES($str) }
} {}

do_test fts3atoken-internal {
  execsql { SELECT fts3_tokenizer_internal_test() }
} {ok}

#-------------------------------------------------------------------------
# Test empty tokenizer names.
#
do_catchsql_test 6.1.1 {
  CREATE VIRTUAL TABLE t3 USING fts4(tokenize="");
} {1 {unknown tokenizer: }}
do_catchsql_test 6.1.2 {
  CREATE VIRTUAL TABLE t3 USING fts4(tokenize=);
} {1 {unknown tokenizer: }}
do_catchsql_test 6.1.3 {
  CREATE VIRTUAL TABLE t3 USING fts4(tokenize=" ");
} {1 {unknown tokenizer: }}

do_catchsql_test 6.2.1 {
  SELECT fts3_tokenizer(NULL);
} {1 {unknown tokenizer: }}

sqlite3_db_config db SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER 1
do_catchsql_test 6.2.2 {
  SELECT fts3_tokenizer(NULL, X'1234567812345678');
} {1 {argument type mismatch}}
do_catchsql_test 6.2.3 {
  SELECT fts3_tokenizer(NULL, X'12345678');
} {1 {argument type mismatch}}