granicus.if.org Git - postgresql/commitdiff
Fix a couple of cases of JSON output.
author    Andrew Dunstan <andrew@dunslane.net>
          Mon, 20 Feb 2012 20:01:03 +0000 (15:01 -0500)
committer Andrew Dunstan <andrew@dunslane.net>
          Mon, 20 Feb 2012 20:01:03 +0000 (15:01 -0500)
First, as noted by Itagaki Takahiro, a datum of type JSON doesn't
need to be escaped. Second, ensure that numeric output not in
the form of a legal JSON number is quoted and escaped.
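
The numeric fix boils down to a cheap lexical test rather than a full JSON-number parser: the letters that occur in "NaN", "Infinity" and "-Infinity" can never occur in a syntactically valid JSON number, so a single strpbrk() against that letter set decides whether the output can be emitted bare or has to be quoted and escaped. The following is a minimal standalone sketch of that test; is_bare_json_number is an illustrative name, not a function in the patch, which inlines the check directly in datum_to_json.

#include <stdio.h>
#include <string.h>

/*
 * Letters that can appear in numeric output ("NaN", "Infinity",
 * "-Infinity") but never in a syntactically valid JSON number.
 */
#define NON_NUMERIC_LETTER "NnAaIiFfTtYy"

/* 1 if the string can be emitted as a bare JSON number, 0 if it must be quoted. */
static int
is_bare_json_number(const char *outputstr)
{
    return strpbrk(outputstr, NON_NUMERIC_LETTER) == NULL;
}

int
main(void)
{
    const char *samples[] = {"1.5", "-3e10", "NaN", "Infinity", "-Infinity"};
    int         i;

    for (i = 0; i < (int) (sizeof(samples) / sizeof(samples[0])); i++)
        printf("%-10s -> %s\n", samples[i],
               is_bare_json_number(samples[i]) ? "bare number" : "quoted string");
    return 0;
}

Outputs such as 1.5 or -3e10 pass through unchanged, while NaN and the infinities are routed through escape_json and come out as quoted strings, matching the new regression tests below.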

src/backend/utils/adt/json.c
src/test/regress/expected/json.out
src/test/regress/sql/json.sql

index 60addf2871f3439fc0cafa2e3cb64a960c627cb8..feda0e00357407a7cdd7ac7a8239771c3d868955 100644 (file)
@@ -84,6 +84,10 @@ static void array_dim_to_json(StringInfo result, int dim, int ndims,int * dims,
                                                          Oid typoutputfunc, bool use_line_feeds);
 static void array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds);
 
+/* fake type category for JSON so we can distinguish it in datum_to_json */
+#define TYPCATEGORY_JSON 'j'
+/* letters appearing in numeric output that aren't valid in a JSON number */
+#define NON_NUMERIC_LETTER "NnAaIiFfTtYy"
 /*
  * Input.
  */
@@ -707,10 +711,20 @@ datum_to_json(Datum val, StringInfo result, TYPCATEGORY tcategory,
                case TYPCATEGORY_NUMERIC:
                        outputstr = OidOutputFunctionCall(typoutputfunc, val);
                        /*
-                        * Don't call escape_json here. Numeric output should
-                        * be a valid JSON number and JSON numbers shouldn't
-                        * be quoted.
+                        * Don't call escape_json here if it's a valid JSON
+                        * number. Numeric output should usually be a valid 
+                        * JSON number and JSON numbers shouldn't be quoted. 
+                        * Quote cases like "NaN" and "Infinity", however.
                         */
+                       if (strpbrk(outputstr,NON_NUMERIC_LETTER) == NULL)
+                               appendStringInfoString(result, outputstr);
+                       else
+                               escape_json(result, outputstr);
+                       pfree(outputstr);
+                       break;
+               case TYPCATEGORY_JSON:
+                       /* JSON will already be escaped */
+                       outputstr = OidOutputFunctionCall(typoutputfunc, val);
                        appendStringInfoString(result, outputstr);
                        pfree(outputstr);
                        break;
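
The new TYPCATEGORY_JSON branch above appends the datum's output verbatim because the json type's output function already produces valid JSON text; running it through escape_json would wrap it in double quotes and backslash-escape it, turning an object or array into a mere JSON string. A small standalone illustration of the difference follows; escape_json_demo is a simplified stand-in for the backend's escape_json (control-character handling omitted), not the real function.

#include <stdio.h>

/*
 * Simplified stand-in for the backend's escape_json(): wrap the value
 * in double quotes and backslash-escape embedded quotes and backslashes.
 */
static void
escape_json_demo(const char *in, char *out)
{
    char *p = out;

    *p++ = '"';
    for (; *in; in++)
    {
        if (*in == '"' || *in == '\\')
            *p++ = '\\';
        *p++ = *in;
    }
    *p++ = '"';
    *p = '\0';
}

int
main(void)
{
    const char *jsonval = "{\"a\":1}";  /* output of a json-typed datum */
    char        buf[64];

    escape_json_demo(jsonval, buf);
    printf("escaped : %s\n", buf);      /* "{\"a\":1}"  -- now a JSON string */
    printf("verbatim: %s\n", jsonval);  /* {"a":1}      -- still an object   */
    return 0;
}
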
@@ -806,9 +820,10 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
                                          typalign, &elements, &nulls,
                                          &nitems);
 
-       /* can't have an array of arrays, so this is the only special case here */
        if (element_type == RECORDOID)
                tcategory = TYPCATEGORY_COMPOSITE;
+       else if (element_type == JSONOID)
+               tcategory = TYPCATEGORY_JSON;
        else
                tcategory = TypeCategory(element_type);
 
@@ -876,6 +891,8 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
                        tcategory = TYPCATEGORY_ARRAY;
                else if (tupdesc->attrs[i]->atttypid == RECORDOID)
                        tcategory = TYPCATEGORY_COMPOSITE;
+               else if (tupdesc->attrs[i]->atttypid == JSONOID)
+                       tcategory = TYPCATEGORY_JSON;
                else
                        tcategory = TypeCategory(tupdesc->attrs[i]->atttypid);
 
index 2b573511139c43be3ed469e94ddafb751432e73f..fa8415cdb77c5a5e6adac2084b2bb53e9389bc54 100644 (file)
@@ -367,3 +367,33 @@ SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),
  {"f1":[5,6,7,8,9,10]}
 (1 row)
 
+-- non-numeric output
+SELECT row_to_json(q)
+FROM (SELECT 'NaN'::float8 AS "float8field") q;
+      row_to_json      
+-----------------------
+ {"float8field":"NaN"}
+(1 row)
+
+SELECT row_to_json(q)
+FROM (SELECT 'Infinity'::float8 AS "float8field") q;
+        row_to_json         
+----------------------------
+ {"float8field":"Infinity"}
+(1 row)
+
+SELECT row_to_json(q)
+FROM (SELECT '-Infinity'::float8 AS "float8field") q;
+         row_to_json         
+-----------------------------
+ {"float8field":"-Infinity"}
+(1 row)
+
+-- json input
+SELECT row_to_json(q)
+FROM (SELECT '{"a":1,"b": [2,3,4,"d","e","f"],"c":{"p":1,"q":2}}'::json AS "jsonfield") q;
+                           row_to_json                            
+------------------------------------------------------------------
+ {"jsonfield":{"a":1,"b": [2,3,4,"d","e","f"],"c":{"p":1,"q":2}}}
+(1 row)
+
index 61273555aae1fe874d19e955dc17b115d0f3140e..ab1c41c1c4c98562c25fef87296696507dd9ac6c 100644 (file)
@@ -97,3 +97,18 @@ SELECT row_to_json(q,true)
 FROM rows q;
 
 SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
+
+-- non-numeric output
+SELECT row_to_json(q)
+FROM (SELECT 'NaN'::float8 AS "float8field") q;
+
+SELECT row_to_json(q)
+FROM (SELECT 'Infinity'::float8 AS "float8field") q;
+
+SELECT row_to_json(q)
+FROM (SELECT '-Infinity'::float8 AS "float8field") q;
+
+-- json input
+SELECT row_to_json(q)
+FROM (SELECT '{"a":1,"b": [2,3,4,"d","e","f"],"c":{"p":1,"q":2}}'::json AS "jsonfield") q;
+