From: chenguoping
Date: Fri, 27 Dec 2019 07:07:00 +0000 (+0800)
Subject: add testcases of object and token
X-Git-Tag: json-c-0.14-20200419~66^2~1
X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=144657299772c7fe764cd778bbd258f1aee9ecf7;p=json-c

add testcases of object and token
---

diff --git a/tests/test_deep_copy.c b/tests/test_deep_copy.c
index 63f882b..baa4e09 100644
--- a/tests/test_deep_copy.c
+++ b/tests/test_deep_copy.c
@@ -18,6 +18,9 @@ static const char *json_str1 =
 "    \"GlossDiv\": {"
 "      \"title\": \"S\","
 "      \"null_obj\": null, "
+"      \"exixt\": false,"
+"      \"quantity\":20,"
+"      \"univalent\":19.8,"
 "      \"GlossList\": {"
 "        \"GlossEntry\": {"
 "          \"ID\": \"SGML\","
diff --git a/tests/test_deep_copy.expected b/tests/test_deep_copy.expected
index d009e94..4f0c39d 100644
--- a/tests/test_deep_copy.expected
+++ b/tests/test_deep_copy.expected
@@ -13,6 +13,9 @@ Printing JSON objects for visual inspection
     "GlossDiv":{
       "title":"S",
       "null_obj":null,
+      "exixt":false,
+      "quantity":20,
+      "univalent":19.8,
       "GlossList":{
         "GlossEntry":{
           "ID":"SGML",
diff --git a/tests/test_double_serializer.c b/tests/test_double_serializer.c
index ddace8e..773527a 100644
--- a/tests/test_double_serializer.c
+++ b/tests/test_double_serializer.c
@@ -31,6 +31,12 @@ int main()
 	json_object_set_serializer(obj, NULL, NULL, NULL);
 	printf("obj.to_string(reset)=%s\n", json_object_to_json_string(obj));
 
+	json_object_put(obj);
+	printf("Test no zero reset serializer:\n");
+	obj = json_object_new_double(3.1415000);
+	json_object_set_serializer(obj, json_object_double_to_json_string, "%.17g", NULL);
+	printf("obj.to_string(reset)=%s\n", json_object_to_json_string_ext(obj, 4));
+
 	json_object_put(obj);
 	obj = json_object_new_double(0.52381);
diff --git a/tests/test_double_serializer.expected b/tests/test_double_serializer.expected
index 221d832..8e3b7e4 100644
--- a/tests/test_double_serializer.expected
+++ b/tests/test_double_serializer.expected
@@ -6,6 +6,8 @@ Test explicit serializer with custom userdata:
 obj.to_string(custom)=test
 Test reset serializer:
 obj.to_string(reset)=0.5
+Test no zero reset serializer:
+obj.to_string(reset)=3.1415000000000002
 obj.to_string(default format)=0.52381
 obj.to_string(with global format)=x0.524y
 obj.to_string(with thread format)=T0.52X
diff --git a/tests/test_parse.c b/tests/test_parse.c
index 0c8fc4b..4469498 100644
--- a/tests/test_parse.c
+++ b/tests/test_parse.c
@@ -49,7 +49,15 @@ static void test_basic_parse()
 	single_basic_parse("/* hello */\"foo\"", 0);
 	single_basic_parse("// hello\n\"foo\"", 0);
 	single_basic_parse("\"foo\"blue", 0);
+	single_basic_parse("\'foo\'", 0);
 	single_basic_parse("\"\\u0041\\u0042\\u0043\"", 0);
+	single_basic_parse("\"\\u4e16\\u754c\\u00df\"", 0);
+	single_basic_parse("\"\\u4E16\"", 0);
+	single_basic_parse("\"\\u4e1\"", 0);
+	single_basic_parse("\"\\u4e1@\"", 0);
+	single_basic_parse("\"\\ud840\\u4e16\"", 0);
+	single_basic_parse("\"\\ud840\"", 0);
+	single_basic_parse("\"\\udd27\"", 0);
 	// Test with a "short" high surrogate
 	single_basic_parse("[9,'\\uDAD", 0);
 	single_basic_parse("null", 0);
@@ -91,7 +99,9 @@ static void test_basic_parse()
 	single_basic_parse("12.3xxx", 0);
 
 	single_basic_parse("{\"FoO\" : -12.3E512}", 0);
+	single_basic_parse("{\"FoO\" : -12.3e512}", 0);
 	single_basic_parse("{\"FoO\" : -12.3E51.2}", 0); /* non-sensical, returns null */
+	single_basic_parse("{\"FoO\" : -12.3E512E12}", 0); /* non-sensical, returns null */
 	single_basic_parse("[\"\\n\"]", 0);
 	single_basic_parse("[\"\\nabc\\n\"]", 0);
 	single_basic_parse("[null]", 0);
@@ -100,16 +110,20 @@ static void test_basic_parse()
 	single_basic_parse("[\"abc\",null,\"def\",12]", 0);
 	single_basic_parse("{}", 0);
 	single_basic_parse("{ \"foo\": \"bar\" }", 0);
+	single_basic_parse("{ \'foo\': \'bar\' }", 0);
 	single_basic_parse("{ \"foo\": \"bar\", \"baz\": null, \"bool0\": true }", 0);
 	single_basic_parse("{ \"foo\": [null, \"foo\"] }", 0);
 	single_basic_parse("{ \"abc\": 12, \"foo\": \"bar\", \"bool0\": false, \"bool1\": true, \"arr\": [ 1, 2, 3, null, 5 ] }", 0);
 	single_basic_parse("{ \"abc\": \"blue\nred\\ngreen\" }", 0);
 
 	// Clear serializer for these tests so we see the actual parsed value.
+	single_basic_parse("null", 1);
+	single_basic_parse("false", 1);
 	single_basic_parse("[0e]", 1);
 	single_basic_parse("[0e+]", 1);
 	single_basic_parse("[0e+-1]", 1);
 	single_basic_parse("[18446744073709551616]", 1);
+	single_basic_parse("\"hello world!\"", 1);
 }
 
 static void test_utf8_parse()
@@ -177,6 +191,16 @@ struct incremental_step {
 	{ "{ \"foo\": 456 }", -1, -1, json_tokener_success, 1 },
 	{ "{ \"foo\": 789 }", -1, -1, json_tokener_success, 1 },
 
+	/* Check the comment parse*/
+	{ "/* hello */{ \"foo\"", -1, -1, json_tokener_continue, 0 },
+	{ "/* hello */:/* hello */", -1, -1, json_tokener_continue, 0 },
+	{ "\"bar\"/* hello */", -1, -1, json_tokener_continue, 0 },
+	{ "}/* hello */", -1, -1, json_tokener_success, 1 },
+	{ "/ hello ", -1, 1, json_tokener_error_parse_comment, 1 },
+	{ "/* hello\"foo\"", -1, -1, json_tokener_continue, 1 },
+	{ "/* hello*\"foo\"", -1, -1, json_tokener_continue, 1 },
+	{ "// hello\"foo\"", -1, -1, json_tokener_continue, 1 },
+
 	/* Check a basic incremental parse */
 	{ "{ \"foo", -1, -1, json_tokener_continue, 0 },
 	{ "\": {\"bar", -1, -1, json_tokener_continue, 0 },
@@ -201,6 +225,8 @@ struct incremental_step {
 	/* This should parse as the number 12, since it continues the "1" */
 	{ "2", 2, 1, json_tokener_success, 0 },
 	{ "12{", 3, 2, json_tokener_success, 1 },
+	/* Parse number in strict model */
+	{ "[02]", -1, 3, json_tokener_error_parse_number, 3 },
 
 	/* Similar tests for other kinds of objects: */
 	/* These could all return success immediately, since regardless of
@@ -293,8 +319,17 @@ struct incremental_step {
 	{ "\"\\/\"", -1, -1, json_tokener_success, 0 },
 	// Escaping a forward slash is optional
 	{ "\"/\"", -1, -1, json_tokener_success, 0 },
+	/* Check wrong escape sequences */
+	{ "\"\\a\"", -1, 2, json_tokener_error_parse_string, 1 },
+
+	/* Check '\'' in strict model */
+	{ "\'foo\'", -1, 0, json_tokener_error_parse_unexpected, 3 },
+
 	/* Parse array/object */
 	{ "[1,2,3]", -1, -1, json_tokener_success, 0 },
+	{ "[1,2,3}", -1, 6, json_tokener_error_parse_array, 1 },
+	{ "{\"a\"}", -1, 4, json_tokener_error_parse_object_key_sep, 1 },
+	{ "{\"a\":1]", -1, 6, json_tokener_error_parse_object_value_sep, 1 },
 
 	/* This behaviour doesn't entirely follow the json spec, but until we have
 	   a way to specify how strict to be we follow Postel's Law and be liberal
diff --git a/tests/test_parse.expected b/tests/test_parse.expected
index 5d3976a..f8c6b62 100644
--- a/tests/test_parse.expected
+++ b/tests/test_parse.expected
@@ -3,7 +3,15 @@ new_obj.to_string(/* hello */"foo")="foo"
 new_obj.to_string(// hello
 "foo")="foo"
 new_obj.to_string("foo"blue)="foo"
+new_obj.to_string('foo')="foo"
 new_obj.to_string("\u0041\u0042\u0043")="ABC"
+new_obj.to_string("\u4e16\u754c\u00df")="世界ß"
+new_obj.to_string("\u4E16")="世"
+new_obj.to_string("\u4e1")=null
+new_obj.to_string("\u4e1@")=null
+new_obj.to_string("\ud840\u4e16")="�世"
+new_obj.to_string("\ud840")="�"
+new_obj.to_string("\udd27")="�"
 new_obj.to_string([9,'\uDAD)=null
 new_obj.to_string(null)=null
 new_obj.to_string(NaN)=NaN
@@ -32,7 +40,9 @@ new_obj.to_string(12.3.4)=null
 new_obj.to_string(2015-01-15)=null
 new_obj.to_string(12.3xxx)=12.3
 new_obj.to_string({"FoO" : -12.3E512})={ "FoO": -12.3E512 }
+new_obj.to_string({"FoO" : -12.3e512})={ "FoO": -12.3e512 }
 new_obj.to_string({"FoO" : -12.3E51.2})=null
+new_obj.to_string({"FoO" : -12.3E512E12})=null
 new_obj.to_string(["\n"])=[ "\n" ]
 new_obj.to_string(["\nabc\n"])=[ "\nabc\n" ]
 new_obj.to_string([null])=[ null ]
@@ -41,15 +51,19 @@ new_obj.to_string([false])=[ false ]
 new_obj.to_string(["abc",null,"def",12])=[ "abc", null, "def", 12 ]
 new_obj.to_string({})={ }
 new_obj.to_string({ "foo": "bar" })={ "foo": "bar" }
+new_obj.to_string({ 'foo': 'bar' })={ "foo": "bar" }
 new_obj.to_string({ "foo": "bar", "baz": null, "bool0": true })={ "foo": "bar", "baz": null, "bool0": true }
 new_obj.to_string({ "foo": [null, "foo"] })={ "foo": [ null, "foo" ] }
 new_obj.to_string({ "abc": 12, "foo": "bar", "bool0": false, "bool1": true, "arr": [ 1, 2, 3, null, 5 ] })={ "abc": 12, "foo": "bar", "bool0": false, "bool1": true, "arr": [ 1, 2, 3, null, 5 ] }
 new_obj.to_string({ "abc": "blue
 red\ngreen" })={ "abc": "blue\nred\ngreen" }
+new_obj.to_string(null)=null
+new_obj.to_string(false)=false
 new_obj.to_string([0e])=[ 0.0 ]
 new_obj.to_string([0e+])=[ 0.0 ]
 new_obj.to_string([0e+-1])=null
 new_obj.to_string([18446744073709551616])=[ 9223372036854775807 ]
+new_obj.to_string("hello world!")="hello world!"
 ==================================
 new_obj.to_string()=null
 new_obj.to_string({})=null
@@ -63,6 +77,14 @@ json_tokener_parse({ "foo) ... got error as expected
 json_tokener_parse_ex(tok, { "foo": 123 }, 14) ... OK: got object of type [object]: { "foo": 123 }
 json_tokener_parse_ex(tok, { "foo": 456 }, 14) ... OK: got object of type [object]: { "foo": 456 }
 json_tokener_parse_ex(tok, { "foo": 789 }, 14) ... OK: got object of type [object]: { "foo": 789 }
+json_tokener_parse_ex(tok, /* hello */{ "foo", 18) ... OK: got correct error: continue
+json_tokener_parse_ex(tok, /* hello */:/* hello */, 23) ... OK: got correct error: continue
+json_tokener_parse_ex(tok, "bar"/* hello */, 16) ... OK: got correct error: continue
+json_tokener_parse_ex(tok, }/* hello */, 12) ... OK: got object of type [object]: { "foo": "bar" }
+json_tokener_parse_ex(tok, / hello , 8) ... OK: got correct error: expected comment
+json_tokener_parse_ex(tok, /* hello"foo", 13) ... OK: got correct error: continue
+json_tokener_parse_ex(tok, /* hello*"foo", 14) ... OK: got correct error: continue
+json_tokener_parse_ex(tok, // hello"foo", 13) ... OK: got correct error: continue
 json_tokener_parse_ex(tok, { "foo , 6) ... OK: got correct error: continue
 json_tokener_parse_ex(tok, ": {"bar , 8) ... OK: got correct error: continue
 json_tokener_parse_ex(tok, ":13}} , 6) ... OK: got object of type [object]: { "foo": { "bar": 13 } }
@@ -77,6 +99,7 @@ json_tokener_parse_ex(tok, "Y" , 3) ... OK: got object of type [string
 json_tokener_parse_ex(tok, 1 , 1) ... OK: got correct error: continue
 json_tokener_parse_ex(tok, 2 , 2) ... OK: got object of type [int]: 12
 json_tokener_parse_ex(tok, 12{ , 3) ... OK: got object of type [int]: 12
+json_tokener_parse_ex(tok, [02] , 4) ... OK: got correct error: number expected
 json_tokener_parse_ex(tok, false , 5) ... OK: got correct error: continue
 json_tokener_parse_ex(tok, false , 6) ... OK: got object of type [boolean]: false
 json_tokener_parse_ex(tok, true , 4) ... OK: got correct error: continue
@@ -138,10 +161,15 @@ json_tokener_parse_ex(tok, "\r" , 4) ... OK: got object of type [string
 json_tokener_parse_ex(tok, "\t" , 4) ... OK: got object of type [string]: "\t"
 json_tokener_parse_ex(tok, "\/" , 4) ... OK: got object of type [string]: "\/"
 json_tokener_parse_ex(tok, "/" , 3) ... OK: got object of type [string]: "\/"
+json_tokener_parse_ex(tok, "\a" , 4) ... OK: got correct error: invalid string sequence
+json_tokener_parse_ex(tok, 'foo' , 5) ... OK: got correct error: unexpected character
 json_tokener_parse_ex(tok, [1,2,3] , 7) ... OK: got object of type [array]: [ 1, 2, 3 ]
+json_tokener_parse_ex(tok, [1,2,3} , 7) ... OK: got correct error: array value separator ',' expected
+json_tokener_parse_ex(tok, {"a"} , 5) ... OK: got correct error: object property name separator ':' expected
+json_tokener_parse_ex(tok, {"a":1] , 7) ... OK: got correct error: object value separator ',' expected
 json_tokener_parse_ex(tok, [1,2,3,] , 8) ... OK: got object of type [array]: [ 1, 2, 3 ]
 json_tokener_parse_ex(tok, [1,2,,3,] , 9) ... OK: got correct error: unexpected character
 json_tokener_parse_ex(tok, [1,2,3,] , 8) ... OK: got correct error: unexpected character
 json_tokener_parse_ex(tok, {"a":1,} , 8) ... OK: got correct error: unexpected character
-End Incremental Tests OK=83 ERROR=0
+End Incremental Tests OK=97 ERROR=0
 ==================================
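
The incremental_step entries added above drive json_tokener_parse_ex(), which consumes input in chunks and keeps returning json_tokener_continue until a complete JSON value has been seen. For readers unfamiliar with that API, here is a minimal sketch of the same incremental-parse pattern outside the test harness; it is not part of this commit and assumes json-c is installed with its headers reachable as <json-c/json.h>. The chunk strings mirror the "{ \"foo" / "\": {\"bar" / "\":13}}" steps from the test table.

/*
 * Minimal sketch (not part of this commit) of an incremental parse loop
 * like the one test_parse.c exercises.
 */
#include <stdio.h>
#include <string.h>
#include <json-c/json.h>

int main(void)
{
	const char *chunks[] = { "{ \"foo", "\": {\"bar", "\":13}}" };
	struct json_tokener *tok = json_tokener_new();
	struct json_object *obj = NULL;
	size_t i;

	for (i = 0; i < sizeof(chunks) / sizeof(chunks[0]); i++)
	{
		/* Feed one fragment; a non-NULL return means a complete value. */
		obj = json_tokener_parse_ex(tok, chunks[i], (int)strlen(chunks[i]));
		enum json_tokener_error jerr = json_tokener_get_error(tok);

		if (obj != NULL)
			printf("parsed: %s\n", json_object_to_json_string(obj));
		else if (jerr == json_tokener_continue)
			printf("chunk %u: need more data\n", (unsigned)i);
		else
			printf("chunk %u: error: %s\n", (unsigned)i, json_tokener_error_desc(jerr));
	}

	if (obj != NULL)
		json_object_put(obj); /* releases { "foo": { "bar": 13 } } */
	json_tokener_free(tok);
	return 0;
}

Built against an installed json-c (typically cc demo.c $(pkg-config --cflags --libs json-c)), this prints "need more data" for the first two chunks and the assembled object after the third, which is the behaviour the new expected-output lines record.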