local json = require("json")
local lunit = require("lunit")
local testutil = require("testutil")

local encode = json.encode
-- DECODE NOT 'local' due to requirement for testutil to access it
decode = json.decode.getDecoder(false)

-- Register this chunk as a lunit test case; the test_* functions below
-- are discovered and run by the lunit runner automatically.
module("lunit-strings", lunit.testcase, package.seeall)
-- Recursively assert that `t` has exactly the key/value structure of
-- `expect`. Non-table expectations fall back to plain assert_equal.
-- Fails (via lunit's fail()) on any missing or extra key.
local function assert_table_equal(expect, t)
  if type(expect) ~= 'table' then
    return assert_equal(expect, t)
  end
  for k, v in pairs(expect) do
    -- Guard against malformed expectations in the test data itself.
    if type(k) ~= 'string' and type(k) ~= 'number' and type(k) ~= 'boolean' then
      error("INVALID expected table key")
    end
    if t[k] == nil then
      fail(tostring(k) .. " not found but expected")
    end
    assert_table_equal(v, t[k])
  end
  -- Reverse pass: ensure `t` carries no keys the expectation lacks.
  for k, v in pairs(t) do
    if nil == expect[k] then
      fail(tostring(k) .. " found but not expected")
    end
  end
end
-- lunit fixture: runs before each test in this case.
function setup()
  -- Ensure that the decoder is reset
  _G["decode"] = json.decode.getDecoder(false)
end
-- Verify that a strings.preProcess hook can rewrite string values
-- before they are encoded.
function test_encoder_preprocess()
  local opts = {
    strings = {
      preProcess = function(str)
        return str:gsub("world", "land")
      end
    }
  }
  assert_equal([["Hello land"]], json.encode("Hello world", opts))
end
-- Verify that a strings.postProcess hook sees the unescaped value and
-- that its return value replaces the decoded string.
function test_post_process()
  local opts = {
    strings = {
      postProcess = function(value)
        -- Test that value processed is after escape handling
        assert_equal("test\n", value)
        return "arg"
      end
    }
  }
  local decode = json.decode.getDecoder(opts)
  local ret = decode([["test\n"]])
  -- Test that returned values are used
  assert_equal("arg", ret)
end
-- Verify the strings.strict_quotes option: when enabled, single-quoted
-- strings must be rejected; when disabled, they decode normally.
function test_strict_quotes()
  local opts = {
    strings = {
      strict_quotes = true
    }
  }
  -- Strict mode: a single-quoted string must raise an error.
  assert_error(function()
    local decoder = json.decode.getDecoder(opts)
    decoder("'hello'")
  end)
  -- Lenient mode: single-quoted strings are accepted...
  opts.strings.strict_quotes = false
  assert_equal("hello", json.decode.getDecoder(opts)("'hello'"))
  -- ...including escaped single quotes inside them.
  assert_equal("he'\"llo'", json.decode.getDecoder(opts)("'he\\'\"llo\\''"))
end
-- \uXXXX escape sequences paired with the UTF-8 byte sequence they are
-- expected to decode to, grouped by encoded length.
local utf16_matches = {
  -- 1-byte UTF-8 (U+0000..U+007F)
  { '"\\u0000"', string.char(0x00) },
  { '"\\u007F"', string.char(0x7F) },
  -- 2-byte UTF-8 (U+0080..U+07FF)
  { '"\\u0080"', string.char(0xC2, 0x80) },
  { '"\\u00A2"', string.char(0xC2, 0xA2) },
  { '"\\u07FF"', string.char(0xDF, 0xBF) },
  -- 3-byte UTF-8 (U+0800..U+FFFF)
  { '"\\u0800"', string.char(0xE0, 0xA0, 0x80) },
  { '"\\u20AC"', string.char(0xE2, 0x82, 0xAC) },
  { '"\\uFEFF"', string.char(0xEF, 0xBB, 0xBF) },
  { '"\\uFFFF"', string.char(0xEF, 0xBF, 0xBF) }
}
-- Verify that every \uXXXX fixture decodes to its expected UTF-8 bytes.
function test_utf16_decode()
  for i, v in ipairs(utf16_matches) do
    -- Test that the default \u decoder outputs UTF8
    -- Prefix both sides with the index so a failure identifies the entry.
    local num = tostring(i) .. ' '
    assert_equal(num .. v[2], num .. json.decode(v[1]))
  end
end
-- UTF-8 byte-order mark.
local BOM = string.char(0xEF, 0xBB, 0xBF)
-- BOM skipping tests - here due to relation to UTF8/16
-- Each entry: { input prefixed with a BOM, expected decoded value }.
local BOM_skip_tests = {
  { BOM .. '"x"', "x" },
  { BOM .. '["\\uFFFF",true]', { string.char(0xEF, 0xBF, 0xBF), true } }
  -- Other uses of unicode spaces
}
-- Verify that a leading BOM is skipped and the payload decodes normally.
function test_bom_skip()
  for _, v in ipairs(BOM_skip_tests) do
    assert_table_equal(v[2], json.decode(v[1]))
  end
end
-- Unicode whitespace codepoints gleaned from unicode.org
-- NOTE(review): this chunk of the source was truncated — upstream lists
-- many more codepoints here (ASCII tab/newline, the U+2000 block, etc.).
-- Only the entries visible in the source are kept; restore the full
-- list from the canonical upstream file when merging.
local WHITESPACES = {
  "\\u200B", -- addition, zero-width space
  "\\uFEFF" -- Zero-width non-breaking space (BOM)
}

-- Templates containing %WS% placeholders; the decoded whitespace
-- character is substituted in before each decode attempt.
local inject_ws_values = {
  " %WS%'the%WS blob' %WS%",
  "%WS%{ key: %WS%\"valueMan\",%WS% key2:%WS%4.4}"
}
-- Verify that each whitespace codepoint is ignored wherever the grammar
-- permits whitespace: inject it into sample documents and ensure they
-- still decode to a non-nil value.
function test_whitespace_ignore()
  for _, ws in ipairs(WHITESPACES) do
    -- Decode the \uXXXX escape into the literal whitespace character.
    ws = json.decode('"' .. ws .. '"')
    for _, v in ipairs(inject_ws_values) do
      v = v:gsub("%%WS%%", ws)
      assert_true(nil ~= json.decode(v))
    end
  end
end
-- Round-trip every single-byte value through the default encoder and
-- decoder and verify it survives unchanged.
function test_u_encoding()
  local encoder = json.encode.getEncoder()
  local decoder = json.decode.getDecoder()
  for i = 0, 255 do
    local char = string.char(i)
    assert_equal(char, decoder(encoder(char)))
  end
end
-- Same round-trip as test_u_encoding, but with the encoder's xEncode
-- string option enabled.
function test_x_encoding()
  local encoder = json.encode.getEncoder({ strings = { xEncode = true } })
  local decoder = json.decode.getDecoder()
  for i = 0, 255 do
    local char = string.char(i)
    assert_equal(char, decoder(encoder(char)))
  end
end
-- Round-trip every single-byte value using the strict encoder/decoder
-- option sets.
function test_strict_decoding()
  local encoder = json.encode.getEncoder(json.encode.strict)
  local decoder = json.decode.getDecoder(json.decode.strict)
  for i = 0, 255 do
    local char = string.char(i)
    -- Must wrap character in array due to decoder strict-ness
    assert_equal(char, decoder(encoder({char}))[1])
  end
end