1 /*-------------------------------------------------------------------------
2  *
3  * json.c
4  *              JSON data type support.
5  *
6  * Portions Copyright (c) 1996-2015, PostgreSQL Global Development Group
7  * Portions Copyright (c) 1994, Regents of the University of California
8  *
9  * IDENTIFICATION
10  *        src/backend/utils/adt/json.c
11  *
12  *-------------------------------------------------------------------------
13  */
14 #include "postgres.h"
15
16 #include "access/htup_details.h"
17 #include "access/transam.h"
18 #include "catalog/pg_type.h"
19 #include "executor/spi.h"
20 #include "lib/stringinfo.h"
21 #include "libpq/pqformat.h"
22 #include "mb/pg_wchar.h"
23 #include "miscadmin.h"
24 #include "parser/parse_coerce.h"
25 #include "utils/array.h"
26 #include "utils/builtins.h"
27 #include "utils/date.h"
28 #include "utils/datetime.h"
29 #include "utils/lsyscache.h"
30 #include "utils/json.h"
31 #include "utils/jsonapi.h"
32 #include "utils/typcache.h"
33 #include "utils/syscache.h"
34
35 /*
36  * The context of the parser is maintained by the recursive descent
37  * mechanism, but is passed explicitly to the error reporting routine
38  * for better diagnostics.
39  */
40 typedef enum                                    /* contexts of JSON parser */
41 {
42         JSON_PARSE_VALUE,                       /* expecting a value */
43         JSON_PARSE_STRING,                      /* expecting a string (for a field name) */
44         JSON_PARSE_ARRAY_START,         /* saw '[', expecting value or ']' */
45         JSON_PARSE_ARRAY_NEXT,          /* saw array element, expecting ',' or ']' */
46         JSON_PARSE_OBJECT_START,        /* saw '{', expecting label or '}' */
47         JSON_PARSE_OBJECT_LABEL,        /* saw object label, expecting ':' */
48         JSON_PARSE_OBJECT_NEXT,         /* saw object value, expecting ',' or '}' */
49         JSON_PARSE_OBJECT_COMMA,        /* saw object ',', expecting next label */
50         JSON_PARSE_END                          /* saw the end of a document, expect nothing */
51 } JsonParseContext;
52
53 typedef enum                                    /* type categories for datum_to_json */
54 {
55         JSONTYPE_NULL,                          /* null, so we didn't bother to identify */
56         JSONTYPE_BOOL,                          /* boolean (built-in types only) */
57         JSONTYPE_NUMERIC,                       /* numeric (ditto) */
58         JSONTYPE_DATE,                          /* we use special formatting for datetimes */
59         JSONTYPE_TIMESTAMP,
60         JSONTYPE_TIMESTAMPTZ,
61         JSONTYPE_JSON,                          /* JSON itself (and JSONB) */
62         JSONTYPE_ARRAY,                         /* array */
63         JSONTYPE_COMPOSITE,                     /* composite */
64         JSONTYPE_CAST,                          /* something with an explicit cast to JSON */
65         JSONTYPE_OTHER                          /* all else */
66 } JsonTypeCategory;
67
68 static inline void json_lex(JsonLexContext *lex);
69 static inline void json_lex_string(JsonLexContext *lex);
70 static inline void json_lex_number(JsonLexContext *lex, char *s, bool *num_err);
71 static inline void parse_scalar(JsonLexContext *lex, JsonSemAction *sem);
72 static void parse_object_field(JsonLexContext *lex, JsonSemAction *sem);
73 static void parse_object(JsonLexContext *lex, JsonSemAction *sem);
74 static void parse_array_element(JsonLexContext *lex, JsonSemAction *sem);
75 static void parse_array(JsonLexContext *lex, JsonSemAction *sem);
76 static void report_parse_error(JsonParseContext ctx, JsonLexContext *lex);
77 static void report_invalid_token(JsonLexContext *lex);
78 static int      report_json_context(JsonLexContext *lex);
79 static char *extract_mb_char(char *s);
80 static void composite_to_json(Datum composite, StringInfo result,
81                                   bool use_line_feeds);
82 static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
83                                   Datum *vals, bool *nulls, int *valcount,
84                                   JsonTypeCategory tcategory, Oid outfuncoid,
85                                   bool use_line_feeds);
86 static void array_to_json_internal(Datum array, StringInfo result,
87                                            bool use_line_feeds);
88 static void json_categorize_type(Oid typoid,
89                                          JsonTypeCategory *tcategory,
90                                          Oid *outfuncoid);
91 static void datum_to_json(Datum val, bool is_null, StringInfo result,
92                           JsonTypeCategory tcategory, Oid outfuncoid,
93                           bool key_scalar);
94 static void add_json(Datum val, bool is_null, StringInfo result,
95                  Oid val_type, bool key_scalar);
96 static text *catenate_stringinfo_string(StringInfo buffer, const char *addon);
97
98 /* the null action object used for pure validation */
99 static JsonSemAction nullSemAction =
100 {
101         NULL, NULL, NULL, NULL, NULL,
102         NULL, NULL, NULL, NULL, NULL
103 };
104
105 /* Recursive Descent parser support routines */
106
107 /*
108  * lex_peek
109  *
110  * what is the current look_ahead token?
111  */
112 static inline JsonTokenType
113 lex_peek(JsonLexContext *lex)
114 {
115         return lex->token_type;
116 }
117
118 /*
119  * lex_accept
120  *
121  * accept the look_ahead token and move the lexer to the next token if the
122  * look_ahead token matches the token parameter. In that case, and if required,
123  * also hand back the de-escaped lexeme.
124  *
125  * returns true if the token matched, false otherwise.
126  */
127 static inline bool
128 lex_accept(JsonLexContext *lex, JsonTokenType token, char **lexeme)
129 {
130         if (lex->token_type == token)
131         {
132                 if (lexeme != NULL)
133                 {
134                         if (lex->token_type == JSON_TOKEN_STRING)
135                         {
136                                 if (lex->strval != NULL)
137                                         *lexeme = pstrdup(lex->strval->data);
138                         }
139                         else
140                         {
141                                 int                     len = (lex->token_terminator - lex->token_start);
142                                 char       *tokstr = palloc(len + 1);
143
144                                 memcpy(tokstr, lex->token_start, len);
145                                 tokstr[len] = '\0';
146                                 *lexeme = tokstr;
147                         }
148                 }
149                 json_lex(lex);
150                 return true;
151         }
152         return false;
153 }
154
155 /*
156  * lex_expect
157  *
158  * move the lexer to the next token if the current look_ahead token matches
159  * the parameter token. Otherwise, report an error.
160  */
161 static inline void
162 lex_expect(JsonParseContext ctx, JsonLexContext *lex, JsonTokenType token)
163 {
164         if (!lex_accept(lex, token, NULL))
165                 report_parse_error(ctx, lex);
166 }
167
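/*
 * A minimal sketch (illustrative only; see parse_object_field below for the
 * real thing) of how the parse_* routines combine these helpers:
 *
 *		if (!lex_accept(lex, JSON_TOKEN_STRING, &fname))
 *			report_parse_error(JSON_PARSE_STRING, lex);
 *		lex_expect(JSON_PARSE_OBJECT_LABEL, lex, JSON_TOKEN_COLON);
 *
 * That is: lex_peek to choose between alternatives, lex_accept for optional
 * tokens (and to capture their lexemes), lex_expect for mandatory ones.
 */
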
168 /* chars to consider as part of an alphanumeric token */
169 #define JSON_ALPHANUMERIC_CHAR(c)  \
170         (((c) >= 'a' && (c) <= 'z') || \
171          ((c) >= 'A' && (c) <= 'Z') || \
172          ((c) >= '0' && (c) <= '9') || \
173          (c) == '_' || \
174          IS_HIGHBIT_SET(c))
175
176 /* utility function to check if a string is a valid JSON number */
177 extern bool
178 IsValidJsonNumber(const char *str, int len)
179 {
180         bool            numeric_error;
181         JsonLexContext dummy_lex;
182
183
184         /*
185          * json_lex_number expects a leading '-' to have been eaten already.
186          *
187          * having to cast away the constness of str is ugly, but there's no easy
188          * alternative.
189          */
190         if (*str == '-')
191         {
192                 dummy_lex.input = (char *) str + 1;
193                 dummy_lex.input_length = len - 1;
194         }
195         else
196         {
197                 dummy_lex.input = (char *) str;
198                 dummy_lex.input_length = len;
199         }
200
201         json_lex_number(&dummy_lex, dummy_lex.input, &numeric_error);
202
203         return !numeric_error;
204 }
205
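/*
 * A minimal usage sketch for IsValidJsonNumber, guarded by a hypothetical
 * JSON_USAGE_SKETCHES macro so it is never compiled; the literals are
 * made-up test values.  Callers (e.g. datum_to_json) use this check to
 * decide whether a numeric output string can be emitted unquoted.
 */
#ifdef JSON_USAGE_SKETCHES
static void
sketch_is_valid_json_number(void)
{
	/* well-formed: optional sign, integer part, fraction, exponent */
	Assert(IsValidJsonNumber("-12.5e+3", 8));

	/* ill-formed: leading zero, bare decimal point, dangling exponent */
	Assert(!IsValidJsonNumber("0123", 4));
	Assert(!IsValidJsonNumber("1.", 2));
	Assert(!IsValidJsonNumber("1e", 2));
}
#endif
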
206 /*
207  * Input.
208  */
209 Datum
210 json_in(PG_FUNCTION_ARGS)
211 {
212         char       *json = PG_GETARG_CSTRING(0);
213         text       *result = cstring_to_text(json);
214         JsonLexContext *lex;
215
216         /* validate it */
217         lex = makeJsonLexContext(result, false);
218         pg_parse_json(lex, &nullSemAction);
219
220         /* Internal representation is the same as text, for now */
221         PG_RETURN_TEXT_P(result);
222 }
223
224 /*
225  * Output.
226  */
227 Datum
228 json_out(PG_FUNCTION_ARGS)
229 {
230         /* we needn't detoast because text_to_cstring will handle that */
231         Datum           txt = PG_GETARG_DATUM(0);
232
233         PG_RETURN_CSTRING(TextDatumGetCString(txt));
234 }
235
236 /*
237  * Binary send.
238  */
239 Datum
240 json_send(PG_FUNCTION_ARGS)
241 {
242         text       *t = PG_GETARG_TEXT_PP(0);
243         StringInfoData buf;
244
245         pq_begintypsend(&buf);
246         pq_sendtext(&buf, VARDATA_ANY(t), VARSIZE_ANY_EXHDR(t));
247         PG_RETURN_BYTEA_P(pq_endtypsend(&buf));
248 }
249
250 /*
251  * Binary receive.
252  */
253 Datum
254 json_recv(PG_FUNCTION_ARGS)
255 {
256         StringInfo      buf = (StringInfo) PG_GETARG_POINTER(0);
257         char       *str;
258         int                     nbytes;
259         JsonLexContext *lex;
260
261         str = pq_getmsgtext(buf, buf->len - buf->cursor, &nbytes);
262
263         /* Validate it. */
264         lex = makeJsonLexContextCstringLen(str, nbytes, false);
265         pg_parse_json(lex, &nullSemAction);
266
267         PG_RETURN_TEXT_P(cstring_to_text_with_len(str, nbytes));
268 }
269
270 /*
271  * makeJsonLexContext
272  *
273  * lex constructor, with or without StringInfo object
274  * for de-escaped lexemes.
275  *
276  * Doing without one makes the processing faster, so only create one
277  * if de-escaped lexemes are really required.
278  *
279  * If you already have the json as a text* value, use the first of these
280  * functions, otherwise use makeJsonLexContextCstringLen().
281  */
282 JsonLexContext *
283 makeJsonLexContext(text *json, bool need_escapes)
284 {
285         return makeJsonLexContextCstringLen(VARDATA(json),
286                                                                                 VARSIZE(json) - VARHDRSZ,
287                                                                                 need_escapes);
288 }
289
290 JsonLexContext *
291 makeJsonLexContextCstringLen(char *json, int len, bool need_escapes)
292 {
293         JsonLexContext *lex = palloc0(sizeof(JsonLexContext));
294
295         lex->input = lex->token_terminator = lex->line_start = json;
296         lex->line_number = 1;
297         lex->input_length = len;
298         if (need_escapes)
299                 lex->strval = makeStringInfo();
300         return lex;
301 }
302
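/*
 * A minimal validation sketch (guarded by the hypothetical
 * JSON_USAGE_SKETCHES macro, never compiled): given a bare C string, build
 * a lexer with makeJsonLexContextCstringLen and run the parser with the
 * do-nothing semantic action, mirroring what json_recv does.  Pass
 * need_escapes = false because pure validation never looks at de-escaped
 * lexemes.
 */
#ifdef JSON_USAGE_SKETCHES
static void
sketch_validate_json_cstring(const char *str, int len)
{
	JsonLexContext *lex;

	/* constness must be cast away, as in IsValidJsonNumber above */
	lex = makeJsonLexContextCstringLen((char *) str, len, false);

	/* raises ereport(ERROR) on syntactically invalid input */
	pg_parse_json(lex, &nullSemAction);
}
#endif
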
303 /*
304  * pg_parse_json
305  *
306  * Publicly visible entry point for the JSON parser.
307  *
308  * lex is a lexing context, set up for the json to be processed by calling
309  * makeJsonLexContext(). sem is a structure of function pointers to semantic
310  * action routines to be called at appropriate spots during parsing, and a
311  * pointer to a state object to be passed to those routines.
312  */
313 void
314 pg_parse_json(JsonLexContext *lex, JsonSemAction *sem)
315 {
316         JsonTokenType tok;
317
318         /* get the initial token */
319         json_lex(lex);
320
321         tok = lex_peek(lex);
322
323         /* parse by recursive descent */
324         switch (tok)
325         {
326                 case JSON_TOKEN_OBJECT_START:
327                         parse_object(lex, sem);
328                         break;
329                 case JSON_TOKEN_ARRAY_START:
330                         parse_array(lex, sem);
331                         break;
332                 default:
333                         parse_scalar(lex, sem);         /* json can be a bare scalar */
334         }
335
336         lex_expect(JSON_PARSE_END, lex, JSON_TOKEN_END);
337
338 }
339
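/*
 * A slightly larger sketch (again guarded by the hypothetical
 * JSON_USAGE_SKETCHES macro, never compiled) of driving the parser with
 * real semantic actions, loosely modeled on the way jsonfuncs.c uses this
 * API.  All names here are made up; only the JsonSemAction fields and the
 * callback signatures are real.
 */
#ifdef JSON_USAGE_SKETCHES
typedef struct SketchCountState
{
	JsonLexContext *lex;
	int			nfields;		/* fields seen in the outermost object */
} SketchCountState;

static void
sketch_count_ofield_start(void *state, char *fname, bool isnull)
{
	SketchCountState *s = (SketchCountState *) state;

	/* lex_level 1 means we are directly inside the top-level object */
	if (s->lex->lex_level == 1)
		s->nfields++;
}

static int
sketch_count_top_level_fields(text *json)
{
	SketchCountState state;
	JsonSemAction sem;

	/* need_escapes = true so the field-name callback gets de-escaped keys */
	state.lex = makeJsonLexContext(json, true);
	state.nfields = 0;

	memset(&sem, 0, sizeof(sem));
	sem.semstate = (void *) &state;
	sem.object_field_start = sketch_count_ofield_start;

	pg_parse_json(state.lex, &sem);

	return state.nfields;
}
#endif
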
340 /*
341  *      Recursive Descent parse routines. There is one for each structural
342  *      element in a json document:
343  *        - scalar (string, number, true, false, null)
344  *        - array  ( [ ] )
345  *        - array element
346  *        - object ( { } )
347  *        - object field
348  */
349 static inline void
350 parse_scalar(JsonLexContext *lex, JsonSemAction *sem)
351 {
352         char       *val = NULL;
353         json_scalar_action sfunc = sem->scalar;
354         char      **valaddr;
355         JsonTokenType tok = lex_peek(lex);
356
357         valaddr = sfunc == NULL ? NULL : &val;
358
359         /* a scalar must be a string, a number, true, false, or null */
360         switch (tok)
361         {
362                 case JSON_TOKEN_TRUE:
363                         lex_accept(lex, JSON_TOKEN_TRUE, valaddr);
364                         break;
365                 case JSON_TOKEN_FALSE:
366                         lex_accept(lex, JSON_TOKEN_FALSE, valaddr);
367                         break;
368                 case JSON_TOKEN_NULL:
369                         lex_accept(lex, JSON_TOKEN_NULL, valaddr);
370                         break;
371                 case JSON_TOKEN_NUMBER:
372                         lex_accept(lex, JSON_TOKEN_NUMBER, valaddr);
373                         break;
374                 case JSON_TOKEN_STRING:
375                         lex_accept(lex, JSON_TOKEN_STRING, valaddr);
376                         break;
377                 default:
378                         report_parse_error(JSON_PARSE_VALUE, lex);
379         }
380
381         if (sfunc != NULL)
382                 (*sfunc) (sem->semstate, val, tok);
383 }
384
385 static void
386 parse_object_field(JsonLexContext *lex, JsonSemAction *sem)
387 {
388         /*
389          * An object field is "fieldname" : value where value can be a scalar,
390          * object or array.  Note: in user-facing docs and error messages, we
391          * generally call a field name a "key".
392          */
393
394         char       *fname = NULL;       /* keep compiler quiet */
395         json_ofield_action ostart = sem->object_field_start;
396         json_ofield_action oend = sem->object_field_end;
397         bool            isnull;
398         char      **fnameaddr = NULL;
399         JsonTokenType tok;
400
401         if (ostart != NULL || oend != NULL)
402                 fnameaddr = &fname;
403
404         if (!lex_accept(lex, JSON_TOKEN_STRING, fnameaddr))
405                 report_parse_error(JSON_PARSE_STRING, lex);
406
407         lex_expect(JSON_PARSE_OBJECT_LABEL, lex, JSON_TOKEN_COLON);
408
409         tok = lex_peek(lex);
410         isnull = tok == JSON_TOKEN_NULL;
411
412         if (ostart != NULL)
413                 (*ostart) (sem->semstate, fname, isnull);
414
415         switch (tok)
416         {
417                 case JSON_TOKEN_OBJECT_START:
418                         parse_object(lex, sem);
419                         break;
420                 case JSON_TOKEN_ARRAY_START:
421                         parse_array(lex, sem);
422                         break;
423                 default:
424                         parse_scalar(lex, sem);
425         }
426
427         if (oend != NULL)
428                 (*oend) (sem->semstate, fname, isnull);
429 }
430
431 static void
432 parse_object(JsonLexContext *lex, JsonSemAction *sem)
433 {
434         /*
435          * an object is a possibly empty sequence of object fields, separated by
436          * commas and surrounded by curly braces.
437          */
438         json_struct_action ostart = sem->object_start;
439         json_struct_action oend = sem->object_end;
440         JsonTokenType tok;
441
442         if (ostart != NULL)
443                 (*ostart) (sem->semstate);
444
445         /*
446          * Data inside an object is at a higher nesting level than the object
447          * itself. Note that we increment this after we call the semantic routine
448          * for the object start and restore it before we call the routine for the
449          * object end.
450          */
451         lex->lex_level++;
452
453         /* we know this will succeed, just clearing the token */
454         lex_expect(JSON_PARSE_OBJECT_START, lex, JSON_TOKEN_OBJECT_START);
455
456         tok = lex_peek(lex);
457         switch (tok)
458         {
459                 case JSON_TOKEN_STRING:
460                         parse_object_field(lex, sem);
461                         while (lex_accept(lex, JSON_TOKEN_COMMA, NULL))
462                                 parse_object_field(lex, sem);
463                         break;
464                 case JSON_TOKEN_OBJECT_END:
465                         break;
466                 default:
467                         /* case of an invalid initial token inside the object */
468                         report_parse_error(JSON_PARSE_OBJECT_START, lex);
469         }
470
471         lex_expect(JSON_PARSE_OBJECT_NEXT, lex, JSON_TOKEN_OBJECT_END);
472
473         lex->lex_level--;
474
475         if (oend != NULL)
476                 (*oend) (sem->semstate);
477 }
478
479 static void
480 parse_array_element(JsonLexContext *lex, JsonSemAction *sem)
481 {
482         json_aelem_action astart = sem->array_element_start;
483         json_aelem_action aend = sem->array_element_end;
484         JsonTokenType tok = lex_peek(lex);
485
486         bool            isnull;
487
488         isnull = tok == JSON_TOKEN_NULL;
489
490         if (astart != NULL)
491                 (*astart) (sem->semstate, isnull);
492
493         /* an array element is any object, array or scalar */
494         switch (tok)
495         {
496                 case JSON_TOKEN_OBJECT_START:
497                         parse_object(lex, sem);
498                         break;
499                 case JSON_TOKEN_ARRAY_START:
500                         parse_array(lex, sem);
501                         break;
502                 default:
503                         parse_scalar(lex, sem);
504         }
505
506         if (aend != NULL)
507                 (*aend) (sem->semstate, isnull);
508 }
509
510 static void
511 parse_array(JsonLexContext *lex, JsonSemAction *sem)
512 {
513         /*
514          * an array is a possibly empty sequence of array elements, separated by
515          * commas and surrounded by square brackets.
516          */
517         json_struct_action astart = sem->array_start;
518         json_struct_action aend = sem->array_end;
519
520         if (astart != NULL)
521                 (*astart) (sem->semstate);
522
523         /*
524          * Data inside an array is at a higher nesting level than the array
525          * itself. Note that we increment this after we call the semantic routine
526          * for the array start and restore it before we call the routine for the
527          * array end.
528          */
529         lex->lex_level++;
530
531         lex_expect(JSON_PARSE_ARRAY_START, lex, JSON_TOKEN_ARRAY_START);
532         if (lex_peek(lex) != JSON_TOKEN_ARRAY_END)
533         {
534
535                 parse_array_element(lex, sem);
536
537                 while (lex_accept(lex, JSON_TOKEN_COMMA, NULL))
538                         parse_array_element(lex, sem);
539         }
540
541         lex_expect(JSON_PARSE_ARRAY_NEXT, lex, JSON_TOKEN_ARRAY_END);
542
543         lex->lex_level--;
544
545         if (aend != NULL)
546                 (*aend) (sem->semstate);
547 }
548
549 /*
550  * Lex one token from the input stream.
551  */
552 static inline void
553 json_lex(JsonLexContext *lex)
554 {
555         char       *s;
556         int                     len;
557
558         /* Skip leading whitespace. */
559         s = lex->token_terminator;
560         len = s - lex->input;
561         while (len < lex->input_length &&
562                    (*s == ' ' || *s == '\t' || *s == '\n' || *s == '\r'))
563         {
564                 if (*s == '\n')
565                         ++lex->line_number;
566                 ++s;
567                 ++len;
568         }
569         lex->token_start = s;
570
571         /* Determine token type. */
572         if (len >= lex->input_length)
573         {
574                 lex->token_start = NULL;
575                 lex->prev_token_terminator = lex->token_terminator;
576                 lex->token_terminator = s;
577                 lex->token_type = JSON_TOKEN_END;
578         }
579         else
580                 switch (*s)
581                 {
582                                 /* Single-character token, some kind of punctuation mark. */
583                         case '{':
584                                 lex->prev_token_terminator = lex->token_terminator;
585                                 lex->token_terminator = s + 1;
586                                 lex->token_type = JSON_TOKEN_OBJECT_START;
587                                 break;
588                         case '}':
589                                 lex->prev_token_terminator = lex->token_terminator;
590                                 lex->token_terminator = s + 1;
591                                 lex->token_type = JSON_TOKEN_OBJECT_END;
592                                 break;
593                         case '[':
594                                 lex->prev_token_terminator = lex->token_terminator;
595                                 lex->token_terminator = s + 1;
596                                 lex->token_type = JSON_TOKEN_ARRAY_START;
597                                 break;
598                         case ']':
599                                 lex->prev_token_terminator = lex->token_terminator;
600                                 lex->token_terminator = s + 1;
601                                 lex->token_type = JSON_TOKEN_ARRAY_END;
602                                 break;
603                         case ',':
604                                 lex->prev_token_terminator = lex->token_terminator;
605                                 lex->token_terminator = s + 1;
606                                 lex->token_type = JSON_TOKEN_COMMA;
607                                 break;
608                         case ':':
609                                 lex->prev_token_terminator = lex->token_terminator;
610                                 lex->token_terminator = s + 1;
611                                 lex->token_type = JSON_TOKEN_COLON;
612                                 break;
613                         case '"':
614                                 /* string */
615                                 json_lex_string(lex);
616                                 lex->token_type = JSON_TOKEN_STRING;
617                                 break;
618                         case '-':
619                                 /* Negative number. */
620                                 json_lex_number(lex, s + 1, NULL);
621                                 lex->token_type = JSON_TOKEN_NUMBER;
622                                 break;
623                         case '0':
624                         case '1':
625                         case '2':
626                         case '3':
627                         case '4':
628                         case '5':
629                         case '6':
630                         case '7':
631                         case '8':
632                         case '9':
633                                 /* Positive number. */
634                                 json_lex_number(lex, s, NULL);
635                                 lex->token_type = JSON_TOKEN_NUMBER;
636                                 break;
637                         default:
638                                 {
639                                         char       *p;
640
641                                         /*
642                                          * We're not dealing with a string, number, legal
643                                          * punctuation mark, or end of string.  The only legal
644                                          * tokens we might find here are true, false, and null,
645                                          * but for error reporting purposes we scan until we see a
646                                          * non-alphanumeric character.  That way, we can report
647                                          * the whole word as an unexpected token, rather than just
648                                          * some unintuitive prefix thereof.
649                                          */
650                                         for (p = s; p - s < lex->input_length - len && JSON_ALPHANUMERIC_CHAR(*p); p++)
651                                                  /* skip */ ;
652
653                                         /*
654                                          * We got some sort of unexpected punctuation or an
655                                          * otherwise unexpected character, so just complain about
656                                          * that one character.
657                                          */
658                                         if (p == s)
659                                         {
660                                                 lex->prev_token_terminator = lex->token_terminator;
661                                                 lex->token_terminator = s + 1;
662                                                 report_invalid_token(lex);
663                                         }
664
665                                         /*
666                                          * We've got a real alphanumeric token here.  If it
667                                          * happens to be true, false, or null, all is well.  If
668                                          * not, error out.
669                                          */
670                                         lex->prev_token_terminator = lex->token_terminator;
671                                         lex->token_terminator = p;
672                                         if (p - s == 4)
673                                         {
674                                                 if (memcmp(s, "true", 4) == 0)
675                                                         lex->token_type = JSON_TOKEN_TRUE;
676                                                 else if (memcmp(s, "null", 4) == 0)
677                                                         lex->token_type = JSON_TOKEN_NULL;
678                                                 else
679                                                         report_invalid_token(lex);
680                                         }
681                                         else if (p - s == 5 && memcmp(s, "false", 5) == 0)
682                                                 lex->token_type = JSON_TOKEN_FALSE;
683                                         else
684                                                 report_invalid_token(lex);
685
686                                 }
687                 }                                               /* end of switch */
688 }
689
690 /*
691  * The next token in the input stream is known to be a string; lex it.
692  */
693 static inline void
694 json_lex_string(JsonLexContext *lex)
695 {
696         char       *s;
697         int                     len;
698         int                     hi_surrogate = -1;
699
700         if (lex->strval != NULL)
701                 resetStringInfo(lex->strval);
702
703         Assert(lex->input_length > 0);
704         s = lex->token_start;
705         len = lex->token_start - lex->input;
706         for (;;)
707         {
708                 s++;
709                 len++;
710                 /* Premature end of the string. */
711                 if (len >= lex->input_length)
712                 {
713                         lex->token_terminator = s;
714                         report_invalid_token(lex);
715                 }
716                 else if (*s == '"')
717                         break;
718                 else if ((unsigned char) *s < 32)
719                 {
720                         /* Per RFC4627, these characters MUST be escaped. */
721                         /* Since *s isn't printable, exclude it from the context string */
722                         lex->token_terminator = s;
723                         ereport(ERROR,
724                                         (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
725                                          errmsg("invalid input syntax for type json"),
726                                          errdetail("Character with value 0x%02x must be escaped.",
727                                                            (unsigned char) *s),
728                                          report_json_context(lex)));
729                 }
730                 else if (*s == '\\')
731                 {
732                         /* OK, we have an escape character. */
733                         s++;
734                         len++;
735                         if (len >= lex->input_length)
736                         {
737                                 lex->token_terminator = s;
738                                 report_invalid_token(lex);
739                         }
740                         else if (*s == 'u')
741                         {
742                                 int                     i;
743                                 int                     ch = 0;
744
745                                 for (i = 1; i <= 4; i++)
746                                 {
747                                         s++;
748                                         len++;
749                                         if (len >= lex->input_length)
750                                         {
751                                                 lex->token_terminator = s;
752                                                 report_invalid_token(lex);
753                                         }
754                                         else if (*s >= '0' && *s <= '9')
755                                                 ch = (ch * 16) + (*s - '0');
756                                         else if (*s >= 'a' && *s <= 'f')
757                                                 ch = (ch * 16) + (*s - 'a') + 10;
758                                         else if (*s >= 'A' && *s <= 'F')
759                                                 ch = (ch * 16) + (*s - 'A') + 10;
760                                         else
761                                         {
762                                                 lex->token_terminator = s + pg_mblen(s);
763                                                 ereport(ERROR,
764                                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
765                                                                  errmsg("invalid input syntax for type json"),
766                                                                  errdetail("\"\\u\" must be followed by four hexadecimal digits."),
767                                                                  report_json_context(lex)));
768                                         }
769                                 }
770                                 if (lex->strval != NULL)
771                                 {
772                                         char            utf8str[5];
773                                         int                     utf8len;
774
775                                         if (ch >= 0xd800 && ch <= 0xdbff)
776                                         {
777                                                 if (hi_surrogate != -1)
778                                                         ereport(ERROR,
779                                                            (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
780                                                                 errmsg("invalid input syntax for type json"),
781                                                                 errdetail("Unicode high surrogate must not follow a high surrogate."),
782                                                                 report_json_context(lex)));
783                                                 hi_surrogate = (ch & 0x3ff) << 10;
784                                                 continue;
785                                         }
786                                         else if (ch >= 0xdc00 && ch <= 0xdfff)
787                                         {
788                                                 if (hi_surrogate == -1)
789                                                         ereport(ERROR,
790                                                            (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
791                                                                 errmsg("invalid input syntax for type json"),
792                                                                 errdetail("Unicode low surrogate must follow a high surrogate."),
793                                                                 report_json_context(lex)));
794                                                 ch = 0x10000 + hi_surrogate + (ch & 0x3ff);
795                                                 hi_surrogate = -1;
796                                         }
797
798                                         if (hi_surrogate != -1)
799                                                 ereport(ERROR,
800                                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
801                                                                  errmsg("invalid input syntax for type json"),
802                                                                  errdetail("Unicode low surrogate must follow a high surrogate."),
803                                                                  report_json_context(lex)));
804
805                                         /*
806                                          * For UTF8, replace the escape sequence by the actual
807                                          * utf8 character in lex->strval. Do this also for other
808                                          * encodings if the escape designates an ASCII character,
809                                          * otherwise raise an error.
810                                          */
811
812                                         if (ch == 0)
813                                         {
814                                                 /* We can't allow this, since our TEXT type doesn't */
815                                                 ereport(ERROR,
816                                                                 (errcode(ERRCODE_UNTRANSLATABLE_CHARACTER),
817                                                            errmsg("unsupported Unicode escape sequence"),
818                                                    errdetail("\\u0000 cannot be converted to text."),
819                                                                  report_json_context(lex)));
820                                         }
821                                         else if (GetDatabaseEncoding() == PG_UTF8)
822                                         {
823                                                 unicode_to_utf8(ch, (unsigned char *) utf8str);
824                                                 utf8len = pg_utf_mblen((unsigned char *) utf8str);
825                                                 appendBinaryStringInfo(lex->strval, utf8str, utf8len);
826                                         }
827                                         else if (ch <= 0x007f)
828                                         {
829                                                 /*
830                                                  * This is the only way to designate things like a
831                                                  * form feed character in JSON, so it's useful in all
832                                                  * encodings.
833                                                  */
834                                                 appendStringInfoChar(lex->strval, (char) ch);
835                                         }
836                                         else
837                                         {
838                                                 ereport(ERROR,
839                                                                 (errcode(ERRCODE_UNTRANSLATABLE_CHARACTER),
840                                                            errmsg("unsupported Unicode escape sequence"),
841                                                                  errdetail("Unicode escape values cannot be used for code point values above 007F when the server encoding is not UTF8."),
842                                                                  report_json_context(lex)));
843                                         }
844
845                                 }
846                         }
847                         else if (lex->strval != NULL)
848                         {
849                                 if (hi_surrogate != -1)
850                                         ereport(ERROR,
851                                                         (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
852                                                          errmsg("invalid input syntax for type json"),
853                                                          errdetail("Unicode low surrogate must follow a high surrogate."),
854                                                          report_json_context(lex)));
855
856                                 switch (*s)
857                                 {
858                                         case '"':
859                                         case '\\':
860                                         case '/':
861                                                 appendStringInfoChar(lex->strval, *s);
862                                                 break;
863                                         case 'b':
864                                                 appendStringInfoChar(lex->strval, '\b');
865                                                 break;
866                                         case 'f':
867                                                 appendStringInfoChar(lex->strval, '\f');
868                                                 break;
869                                         case 'n':
870                                                 appendStringInfoChar(lex->strval, '\n');
871                                                 break;
872                                         case 'r':
873                                                 appendStringInfoChar(lex->strval, '\r');
874                                                 break;
875                                         case 't':
876                                                 appendStringInfoChar(lex->strval, '\t');
877                                                 break;
878                                         default:
879                                                 /* Not a valid string escape, so error out. */
880                                                 lex->token_terminator = s + pg_mblen(s);
881                                                 ereport(ERROR,
882                                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
883                                                                  errmsg("invalid input syntax for type json"),
884                                                         errdetail("Escape sequence \"\\%s\" is invalid.",
885                                                                           extract_mb_char(s)),
886                                                                  report_json_context(lex)));
887                                 }
888                         }
889                         else if (strchr("\"\\/bfnrt", *s) == NULL)
890                         {
891                                 /*
892                                  * Simpler processing if we're not bothered about de-escaping
893                                  *
894                                  * It's very tempting to remove the strchr() call here and
895                                  * replace it with a switch statement, but testing so far has
896                                  * shown it's not a performance win.
897                                  */
898                                 lex->token_terminator = s + pg_mblen(s);
899                                 ereport(ERROR,
900                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
901                                                  errmsg("invalid input syntax for type json"),
902                                                  errdetail("Escape sequence \"\\%s\" is invalid.",
903                                                                    extract_mb_char(s)),
904                                                  report_json_context(lex)));
905                         }
906
907                 }
908                 else if (lex->strval != NULL)
909                 {
910                         if (hi_surrogate != -1)
911                                 ereport(ERROR,
912                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
913                                                  errmsg("invalid input syntax for type json"),
914                                                  errdetail("Unicode low surrogate must follow a high surrogate."),
915                                                  report_json_context(lex)));
916
917                         appendStringInfoChar(lex->strval, *s);
918                 }
919
920         }
921
922         if (hi_surrogate != -1)
923                 ereport(ERROR,
924                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
925                                  errmsg("invalid input syntax for type json"),
926                         errdetail("Unicode low surrogate must follow a high surrogate."),
927                                  report_json_context(lex)));
928
929         /* Hooray, we found the end of the string! */
930         lex->prev_token_terminator = lex->token_terminator;
931         lex->token_terminator = s + 1;
932 }
933
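/*
 * Worked example of the surrogate-pair arithmetic above (illustrative
 * only): the escape sequence \ud83d\ude00 encodes U+1F600.  The high
 * surrogate 0xD83D contributes (0xD83D & 0x3ff) << 10 = 0xF400, the low
 * surrogate 0xDE00 contributes 0xDE00 & 0x3ff = 0x200, and
 * 0x10000 + 0xF400 + 0x200 = 0x1F600, which is then emitted as UTF-8
 * (or rejected if the server encoding is not UTF8, per the code above).
 */
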
934 /*-------------------------------------------------------------------------
935  * The next token in the input stream is known to be a number; lex it.
936  *
937  * In JSON, a number consists of four parts:
938  *
939  * (1) An optional minus sign ('-').
940  *
941  * (2) Either a single '0', or a string of one or more digits that does not
942  *         begin with a '0'.
943  *
944  * (3) An optional decimal part, consisting of a period ('.') followed by
945  *         one or more digits.  (Note: While this part can be omitted
946  *         completely, it's not OK to have only the decimal point without
947  *         any digits afterwards.)
948  *
949  * (4) An optional exponent part, consisting of 'e' or 'E', optionally
950  *         followed by '+' or '-', followed by one or more digits.  (Note:
951  *         As with the decimal part, if 'e' or 'E' is present, it must be
952  *         followed by at least one digit.)
953  *
954  * The 's' argument to this function points to the ostensible beginning
955  * of part 2 - i.e. the character after any optional minus sign, and the
956  * first character of the string if there is none.
957  *
958  *-------------------------------------------------------------------------
959  */
960 static inline void
961 json_lex_number(JsonLexContext *lex, char *s, bool *num_err)
962 {
963         bool            error = false;
964         char       *p;
965         int                     len;
966
967         len = s - lex->input;
968         /* Part (1): leading sign indicator. */
969         /* Caller already did this for us; so do nothing. */
970
971         /* Part (2): parse main digit string. */
972         if (*s == '0')
973         {
974                 s++;
975                 len++;
976         }
977         else if (*s >= '1' && *s <= '9')
978         {
979                 do
980                 {
981                         s++;
982                         len++;
983                 } while (len < lex->input_length && *s >= '0' && *s <= '9');
984         }
985         else
986                 error = true;
987
988         /* Part (3): parse optional decimal portion. */
989         if (len < lex->input_length && *s == '.')
990         {
991                 s++;
992                 len++;
993                 if (len == lex->input_length || *s < '0' || *s > '9')
994                         error = true;
995                 else
996                 {
997                         do
998                         {
999                                 s++;
1000                                 len++;
1001                         } while (len < lex->input_length && *s >= '0' && *s <= '9');
1002                 }
1003         }
1004
1005         /* Part (4): parse optional exponent. */
1006         if (len < lex->input_length && (*s == 'e' || *s == 'E'))
1007         {
1008                 s++;
1009                 len++;
1010                 if (len < lex->input_length && (*s == '+' || *s == '-'))
1011                 {
1012                         s++;
1013                         len++;
1014                 }
1015                 if (len == lex->input_length || *s < '0' || *s > '9')
1016                         error = true;
1017                 else
1018                 {
1019                         do
1020                         {
1021                                 s++;
1022                                 len++;
1023                         } while (len < lex->input_length && *s >= '0' && *s <= '9');
1024                 }
1025         }
1026
1027         /*
1028          * Check for trailing garbage.  As in json_lex(), any alphanumeric stuff
1029          * here should be considered part of the token for error-reporting
1030          * purposes.
1031          */
1032         for (p = s; len < lex->input_length && JSON_ALPHANUMERIC_CHAR(*p); p++, len++)
1033                 error = true;
1034
1035         if (num_err != NULL)
1036         {
1037                 /* let the caller handle the error */
1038                 *num_err = error;
1039         }
1040         else
1041         {
1042                 lex->prev_token_terminator = lex->token_terminator;
1043                 lex->token_terminator = p;
1044                 if (error)
1045                         report_invalid_token(lex);
1046         }
1047 }
1048
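/*
 * A few worked examples against the number grammar described above
 * (illustrative only): "0", "-1", "12.75" and "2e-3" lex cleanly, while
 * "007" (leading zero), ".5" (no integer part), "1." (no digits after the
 * decimal point) and "1e+" (no exponent digits) all set the error flag,
 * since each violates one of parts (2)-(4).
 */
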
1049 /*
1050  * Report a parse error.
1051  *
1052  * lex->token_start and lex->token_terminator must identify the current token.
1053  */
1054 static void
1055 report_parse_error(JsonParseContext ctx, JsonLexContext *lex)
1056 {
1057         char       *token;
1058         int                     toklen;
1059
1060         /* Handle case where the input ended prematurely. */
1061         if (lex->token_start == NULL || lex->token_type == JSON_TOKEN_END)
1062                 ereport(ERROR,
1063                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
1064                                  errmsg("invalid input syntax for type json"),
1065                                  errdetail("The input string ended unexpectedly."),
1066                                  report_json_context(lex)));
1067
1068         /* Separate out the current token. */
1069         toklen = lex->token_terminator - lex->token_start;
1070         token = palloc(toklen + 1);
1071         memcpy(token, lex->token_start, toklen);
1072         token[toklen] = '\0';
1073
1074         /* Complain, with the appropriate detail message. */
1075         if (ctx == JSON_PARSE_END)
1076                 ereport(ERROR,
1077                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
1078                                  errmsg("invalid input syntax for type json"),
1079                                  errdetail("Expected end of input, but found \"%s\".",
1080                                                    token),
1081                                  report_json_context(lex)));
1082         else
1083         {
1084                 switch (ctx)
1085                 {
1086                         case JSON_PARSE_VALUE:
1087                                 ereport(ERROR,
1088                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
1089                                                  errmsg("invalid input syntax for type json"),
1090                                                  errdetail("Expected JSON value, but found \"%s\".",
1091                                                                    token),
1092                                                  report_json_context(lex)));
1093                                 break;
1094                         case JSON_PARSE_STRING:
1095                                 ereport(ERROR,
1096                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
1097                                                  errmsg("invalid input syntax for type json"),
1098                                                  errdetail("Expected string, but found \"%s\".",
1099                                                                    token),
1100                                                  report_json_context(lex)));
1101                                 break;
1102                         case JSON_PARSE_ARRAY_START:
1103                                 ereport(ERROR,
1104                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
1105                                                  errmsg("invalid input syntax for type json"),
1106                                                  errdetail("Expected array element or \"]\", but found \"%s\".",
1107                                                                    token),
1108                                                  report_json_context(lex)));
1109                                 break;
1110                         case JSON_PARSE_ARRAY_NEXT:
1111                                 ereport(ERROR,
1112                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
1113                                                  errmsg("invalid input syntax for type json"),
1114                                           errdetail("Expected \",\" or \"]\", but found \"%s\".",
1115                                                                 token),
1116                                                  report_json_context(lex)));
1117                                 break;
1118                         case JSON_PARSE_OBJECT_START:
1119                                 ereport(ERROR,
1120                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
1121                                                  errmsg("invalid input syntax for type json"),
1122                                          errdetail("Expected string or \"}\", but found \"%s\".",
1123                                                            token),
1124                                                  report_json_context(lex)));
1125                                 break;
1126                         case JSON_PARSE_OBJECT_LABEL:
1127                                 ereport(ERROR,
1128                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
1129                                                  errmsg("invalid input syntax for type json"),
1130                                                  errdetail("Expected \":\", but found \"%s\".",
1131                                                                    token),
1132                                                  report_json_context(lex)));
1133                                 break;
1134                         case JSON_PARSE_OBJECT_NEXT:
1135                                 ereport(ERROR,
1136                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
1137                                                  errmsg("invalid input syntax for type json"),
1138                                           errdetail("Expected \",\" or \"}\", but found \"%s\".",
1139                                                                 token),
1140                                                  report_json_context(lex)));
1141                                 break;
1142                         case JSON_PARSE_OBJECT_COMMA:
1143                                 ereport(ERROR,
1144                                                 (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
1145                                                  errmsg("invalid input syntax for type json"),
1146                                                  errdetail("Expected string, but found \"%s\".",
1147                                                                    token),
1148                                                  report_json_context(lex)));
1149                                 break;
1150                         default:
1151                                 elog(ERROR, "unexpected json parse state: %d", ctx);
1152                 }
1153         }
1154 }
1155
1156 /*
1157  * Report an invalid input token.
1158  *
1159  * lex->token_start and lex->token_terminator must identify the token.
1160  */
1161 static void
1162 report_invalid_token(JsonLexContext *lex)
1163 {
1164         char       *token;
1165         int                     toklen;
1166
1167         /* Separate out the offending token. */
1168         toklen = lex->token_terminator - lex->token_start;
1169         token = palloc(toklen + 1);
1170         memcpy(token, lex->token_start, toklen);
1171         token[toklen] = '\0';
1172
1173         ereport(ERROR,
1174                         (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
1175                          errmsg("invalid input syntax for type json"),
1176                          errdetail("Token \"%s\" is invalid.", token),
1177                          report_json_context(lex)));
1178 }
1179
1180 /*
1181  * Report a CONTEXT line for bogus JSON input.
1182  *
1183  * lex->token_terminator must be set to identify the spot where we detected
1184  * the error.  Note that lex->token_start might be NULL, in case the error was
1185  * recognized at EOF.
1186  *
1187  * The return value isn't meaningful, but we make it non-void so that this
1188  * can be invoked inside ereport().
1189  */
1190 static int
1191 report_json_context(JsonLexContext *lex)
1192 {
1193         const char *context_start;
1194         const char *context_end;
1195         const char *line_start;
1196         int                     line_number;
1197         char       *ctxt;
1198         int                     ctxtlen;
1199         const char *prefix;
1200         const char *suffix;
1201
1202         /* Choose boundaries for the part of the input we will display */
1203         context_start = lex->input;
1204         context_end = lex->token_terminator;
1205         line_start = context_start;
1206         line_number = 1;
1207         for (;;)
1208         {
1209                 /* Always advance over newlines */
1210                 if (context_start < context_end && *context_start == '\n')
1211                 {
1212                         context_start++;
1213                         line_start = context_start;
1214                         line_number++;
1215                         continue;
1216                 }
1217                 /* Otherwise, done as soon as we are close enough to context_end */
1218                 if (context_end - context_start < 50)
1219                         break;
1220                 /* Advance to next multibyte character */
1221                 if (IS_HIGHBIT_SET(*context_start))
1222                         context_start += pg_mblen(context_start);
1223                 else
1224                         context_start++;
1225         }
1226
1227         /*
1228          * We add "..." to indicate that the excerpt doesn't start at the
1229          * beginning of the line ... but if we're within 3 characters of the
1230          * beginning of the line, we might as well just show the whole line.
1231          */
1232         if (context_start - line_start <= 3)
1233                 context_start = line_start;
1234
1235         /* Get a null-terminated copy of the data to present */
1236         ctxtlen = context_end - context_start;
1237         ctxt = palloc(ctxtlen + 1);
1238         memcpy(ctxt, context_start, ctxtlen);
1239         ctxt[ctxtlen] = '\0';
1240
1241         /*
1242          * Show the context, prefixing "..." if not starting at start of line, and
1243          * suffixing "..." if not ending at end of line.
1244          */
1245         prefix = (context_start > line_start) ? "..." : "";
1246         suffix = (lex->token_type != JSON_TOKEN_END && context_end - lex->input < lex->input_length && *context_end != '\n' && *context_end != '\r') ? "..." : "";
1247
1248         return errcontext("JSON data, line %d: %s%s%s",
1249                                           line_number, prefix, ctxt, suffix);
1250 }
1251
1252 /*
1253  * Extract a single, possibly multi-byte char from the input string.
1254  */
1255 static char *
1256 extract_mb_char(char *s)
1257 {
1258         char       *res;
1259         int                     len;
1260
1261         len = pg_mblen(s);
1262         res = palloc(len + 1);
1263         memcpy(res, s, len);
1264         res[len] = '\0';
1265
1266         return res;
1267 }
1268
1269 /*
1270  * Determine how we want to print values of a given type in datum_to_json.
1271  *
1272  * Given the datatype OID, return its JsonTypeCategory, as well as the type's
1273  * output function OID.  If the returned category is JSONTYPE_CAST, we
1274  * return the OID of the type->JSON cast function instead.
1275  */
1276 static void
1277 json_categorize_type(Oid typoid,
1278                                          JsonTypeCategory *tcategory,
1279                                          Oid *outfuncoid)
1280 {
1281         bool            typisvarlena;
1282
1283         /* Look through any domain */
1284         typoid = getBaseType(typoid);
1285
1286         *outfuncoid = InvalidOid;
1287
1288         /*
1289          * We need to get the output function for everything except date and
1290          * timestamp types, array and composite types, booleans, and non-builtin
1291          * types where there's a cast to json.
1292          */
1293
1294         switch (typoid)
1295         {
1296                 case BOOLOID:
1297                         *tcategory = JSONTYPE_BOOL;
1298                         break;
1299
1300                 case INT2OID:
1301                 case INT4OID:
1302                 case INT8OID:
1303                 case FLOAT4OID:
1304                 case FLOAT8OID:
1305                 case NUMERICOID:
1306                         getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
1307                         *tcategory = JSONTYPE_NUMERIC;
1308                         break;
1309
1310                 case DATEOID:
1311                         *tcategory = JSONTYPE_DATE;
1312                         break;
1313
1314                 case TIMESTAMPOID:
1315                         *tcategory = JSONTYPE_TIMESTAMP;
1316                         break;
1317
1318                 case TIMESTAMPTZOID:
1319                         *tcategory = JSONTYPE_TIMESTAMPTZ;
1320                         break;
1321
1322                 case JSONOID:
1323                 case JSONBOID:
1324                         getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
1325                         *tcategory = JSONTYPE_JSON;
1326                         break;
1327
1328                 default:
1329                         /* Check for arrays and composites */
1330                         if (OidIsValid(get_element_type(typoid)))
1331                                 *tcategory = JSONTYPE_ARRAY;
1332                         else if (type_is_rowtype(typoid))
1333                                 *tcategory = JSONTYPE_COMPOSITE;
1334                         else
1335                         {
1336                                 /* It's probably the general case ... */
1337                                 *tcategory = JSONTYPE_OTHER;
1338                                 /* but let's look for a cast to json, if it's not built-in */
1339                                 if (typoid >= FirstNormalObjectId)
1340                                 {
1341                                         Oid                     castfunc;
1342                                         CoercionPathType ctype;
1343
1344                                         ctype = find_coercion_pathway(JSONOID, typoid,
1345                                                                                                   COERCION_EXPLICIT,
1346                                                                                                   &castfunc);
1347                                         if (ctype == COERCION_PATH_FUNC && OidIsValid(castfunc))
1348                                         {
1349                                                 *tcategory = JSONTYPE_CAST;
1350                                                 *outfuncoid = castfunc;
1351                                         }
1352                                         else
1353                                         {
1354                                                 /* non-builtin type with no cast */
1355                                                 getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
1356                                         }
1357                                 }
1358                                 else
1359                                 {
1360                                         /* any other builtin type */
1361                                         getTypeOutputInfo(typoid, outfuncoid, &typisvarlena);
1362                                 }
1363                         }
1364                         break;
1365         }
1366 }
1367
1368 /*
1369  * Turn a Datum into JSON text, appending the string to "result".
1370  *
1371  * tcategory and outfuncoid are from a previous call to json_categorize_type,
1372  * except that if is_null is true then they can be invalid.
1373  *
1374  * If key_scalar is true, the value is being printed as a key, so insist
1375  * it's of an acceptable type, and force it to be quoted.
1376  */
1377 static void
1378 datum_to_json(Datum val, bool is_null, StringInfo result,
1379                           JsonTypeCategory tcategory, Oid outfuncoid,
1380                           bool key_scalar)
1381 {
1382         char       *outputstr;
1383         text       *jsontext;
1384
1385         /* callers are expected to ensure that null keys are not passed in */
1386         Assert(!(key_scalar && is_null));
1387
1388         if (is_null)
1389         {
1390                 appendStringInfoString(result, "null");
1391                 return;
1392         }
1393
1394         if (key_scalar &&
1395                 (tcategory == JSONTYPE_ARRAY ||
1396                  tcategory == JSONTYPE_COMPOSITE ||
1397                  tcategory == JSONTYPE_JSON ||
1398                  tcategory == JSONTYPE_CAST))
1399                 ereport(ERROR,
1400                                 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
1401                  errmsg("key value must be scalar, not array, composite, or json")));
1402
1403         switch (tcategory)
1404         {
1405                 case JSONTYPE_ARRAY:
1406                         array_to_json_internal(val, result, false);
1407                         break;
1408                 case JSONTYPE_COMPOSITE:
1409                         composite_to_json(val, result, false);
1410                         break;
1411                 case JSONTYPE_BOOL:
1412                         outputstr = DatumGetBool(val) ? "true" : "false";
1413                         if (key_scalar)
1414                                 escape_json(result, outputstr);
1415                         else
1416                                 appendStringInfoString(result, outputstr);
1417                         break;
1418                 case JSONTYPE_NUMERIC:
1419                         outputstr = OidOutputFunctionCall(outfuncoid, val);
1420
1421                         /*
1422                          * Don't call escape_json for a non-key if it's a valid JSON
1423                          * number.
1424                          */
1425                         if (!key_scalar && IsValidJsonNumber(outputstr, strlen(outputstr)))
1426                                 appendStringInfoString(result, outputstr);
1427                         else
1428                                 escape_json(result, outputstr);
1429                         pfree(outputstr);
1430                         break;
1431                 case JSONTYPE_DATE:
1432                         {
1433                                 DateADT         date;
1434                                 struct pg_tm tm;
1435                                 char            buf[MAXDATELEN + 1];
1436
1437                                 date = DatumGetDateADT(val);
1438
1439                                 /* XSD doesn't support infinite values */
1440                                 if (DATE_NOT_FINITE(date))
1441                                         ereport(ERROR,
1442                                                         (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
1443                                                          errmsg("date out of range"),
1444                                                          errdetail("JSON does not support infinite date values.")));
1445                                 else
1446                                 {
1447                                         j2date(date + POSTGRES_EPOCH_JDATE,
1448                                                    &(tm.tm_year), &(tm.tm_mon), &(tm.tm_mday));
1449                                         EncodeDateOnly(&tm, USE_XSD_DATES, buf);
1450                                 }
1451
1452                                 appendStringInfo(result, "\"%s\"", buf);
1453                         }
1454                         break;
1455                 case JSONTYPE_TIMESTAMP:
1456                         {
1457                                 Timestamp       timestamp;
1458                                 struct pg_tm tm;
1459                                 fsec_t          fsec;
1460                                 char            buf[MAXDATELEN + 1];
1461
1462                                 timestamp = DatumGetTimestamp(val);
1463
1464                                 /* XSD doesn't support infinite values */
1465                                 if (TIMESTAMP_NOT_FINITE(timestamp))
1466                                         ereport(ERROR,
1467                                                         (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
1468                                                          errmsg("timestamp out of range"),
1469                                                          errdetail("JSON does not support infinite timestamp values.")));
1470                                 else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
1471                                         EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
1472                                 else
1473                                         ereport(ERROR,
1474                                                         (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
1475                                                          errmsg("timestamp out of range")));
1476
1477                                 appendStringInfo(result, "\"%s\"", buf);
1478                         }
1479                         break;
1480                 case JSONTYPE_TIMESTAMPTZ:
1481                         {
1482                                 TimestampTz timestamp;
1483                                 struct pg_tm tm;
1484                                 int                     tz;
1485                                 fsec_t          fsec;
1486                                 const char *tzn = NULL;
1487                                 char            buf[MAXDATELEN + 1];
1488
1489                                 timestamp = DatumGetTimestamp(val);
1490
1491                                 /* XSD doesn't support infinite values */
1492                                 if (TIMESTAMP_NOT_FINITE(timestamp))
1493                                         ereport(ERROR,
1494                                                         (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
1495                                                          errmsg("timestamp out of range"),
1496                                                          errdetail("JSON does not support infinite timestamp values.")));
1497                                 else if (timestamp2tm(timestamp, &tz, &tm, &fsec, &tzn, NULL) == 0)
1498                                         EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
1499                                 else
1500                                         ereport(ERROR,
1501                                                         (errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
1502                                                          errmsg("timestamp out of range")));
1503
1504                                 appendStringInfo(result, "\"%s\"", buf);
1505                         }
1506                         break;
1507                 case JSONTYPE_JSON:
1508                         /* JSON and JSONB output will already be escaped */
1509                         outputstr = OidOutputFunctionCall(outfuncoid, val);
1510                         appendStringInfoString(result, outputstr);
1511                         pfree(outputstr);
1512                         break;
1513                 case JSONTYPE_CAST:
1514                         /* outfuncoid refers to a cast function, not an output function */
1515                         jsontext = DatumGetTextP(OidFunctionCall1(outfuncoid, val));
1516                         outputstr = text_to_cstring(jsontext);
1517                         appendStringInfoString(result, outputstr);
1518                         pfree(outputstr);
1519                         pfree(jsontext);
1520                         break;
1521                 default:
1522                         outputstr = OidOutputFunctionCall(outfuncoid, val);
1523                         escape_json(result, outputstr);
1524                         pfree(outputstr);
1525                         break;
1526         }
1527 }
1528
1529 /*
1530  * Process a single dimension of an array.
1531  * If it's the innermost dimension, output the values, otherwise call
1532  * ourselves recursively to process the next dimension.
1533  */
1534 static void
1535 array_dim_to_json(StringInfo result, int dim, int ndims, int *dims, Datum *vals,
1536                                   bool *nulls, int *valcount, JsonTypeCategory tcategory,
1537                                   Oid outfuncoid, bool use_line_feeds)
1538 {
1539         int                     i;
1540         const char *sep;
1541
1542         Assert(dim < ndims);
1543
1544         sep = use_line_feeds ? ",\n " : ",";
1545
1546         appendStringInfoChar(result, '[');
1547
1548         for (i = 1; i <= dims[dim]; i++)
1549         {
1550                 if (i > 1)
1551                         appendStringInfoString(result, sep);
1552
1553                 if (dim + 1 == ndims)
1554                 {
1555                         datum_to_json(vals[*valcount], nulls[*valcount], result, tcategory,
1556                                                   outfuncoid, false);
1557                         (*valcount)++;
1558                 }
1559                 else
1560                 {
1561                         /*
1562                          * Do we want line feeds on inner dimensions of arrays? For now
1563                          * we'll say no.
1564                          */
1565                         array_dim_to_json(result, dim + 1, ndims, dims, vals, nulls,
1566                                                           valcount, tcategory, outfuncoid, false);
1567                 }
1568         }
1569
1570         appendStringInfoChar(result, ']');
1571 }
1572
1573 /*
1574  * Turn an array into JSON.
1575  */
1576 static void
1577 array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
1578 {
1579         ArrayType  *v = DatumGetArrayTypeP(array);
1580         Oid                     element_type = ARR_ELEMTYPE(v);
1581         int                *dim;
1582         int                     ndim;
1583         int                     nitems;
1584         int                     count = 0;
1585         Datum      *elements;
1586         bool       *nulls;
1587         int16           typlen;
1588         bool            typbyval;
1589         char            typalign;
1590         JsonTypeCategory tcategory;
1591         Oid                     outfuncoid;
1592
1593         ndim = ARR_NDIM(v);
1594         dim = ARR_DIMS(v);
1595         nitems = ArrayGetNItems(ndim, dim);
1596
1597         if (nitems <= 0)
1598         {
1599                 appendStringInfoString(result, "[]");
1600                 return;
1601         }
1602
1603         get_typlenbyvalalign(element_type,
1604                                                  &typlen, &typbyval, &typalign);
1605
1606         json_categorize_type(element_type,
1607                                                  &tcategory, &outfuncoid);
1608
1609         deconstruct_array(v, element_type, typlen, typbyval,
1610                                           typalign, &elements, &nulls,
1611                                           &nitems);
1612
1613         array_dim_to_json(result, 0, ndim, dim, elements, nulls, &count, tcategory,
1614                                           outfuncoid, use_line_feeds);
1615
1616         pfree(elements);
1617         pfree(nulls);
1618 }
1619
1620 /*
1621  * Turn a composite / record into JSON.
1622  */
1623 static void
1624 composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
1625 {
1626         HeapTupleHeader td;
1627         Oid                     tupType;
1628         int32           tupTypmod;
1629         TupleDesc       tupdesc;
1630         HeapTupleData tmptup,
1631                            *tuple;
1632         int                     i;
1633         bool            needsep = false;
1634         const char *sep;
1635
1636         sep = use_line_feeds ? ",\n " : ",";
1637
1638         td = DatumGetHeapTupleHeader(composite);
1639
1640         /* Extract rowtype info and find a tupdesc */
1641         tupType = HeapTupleHeaderGetTypeId(td);
1642         tupTypmod = HeapTupleHeaderGetTypMod(td);
1643         tupdesc = lookup_rowtype_tupdesc(tupType, tupTypmod);
1644
1645         /* Build a temporary HeapTuple control structure */
1646         tmptup.t_len = HeapTupleHeaderGetDatumLength(td);
1647         tmptup.t_data = td;
1648         tuple = &tmptup;
1649
1650         appendStringInfoChar(result, '{');
1651
1652         for (i = 0; i < tupdesc->natts; i++)
1653         {
1654                 Datum           val;
1655                 bool            isnull;
1656                 char       *attname;
1657                 JsonTypeCategory tcategory;
1658                 Oid                     outfuncoid;
1659
1660                 if (tupdesc->attrs[i]->attisdropped)
1661                         continue;
1662
1663                 if (needsep)
1664                         appendStringInfoString(result, sep);
1665                 needsep = true;
1666
1667                 attname = NameStr(tupdesc->attrs[i]->attname);
1668                 escape_json(result, attname);
1669                 appendStringInfoChar(result, ':');
1670
1671                 val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
1672
1673                 if (isnull)
1674                 {
1675                         tcategory = JSONTYPE_NULL;
1676                         outfuncoid = InvalidOid;
1677                 }
1678                 else
1679                         json_categorize_type(tupdesc->attrs[i]->atttypid,
1680                                                                  &tcategory, &outfuncoid);
1681
1682                 datum_to_json(val, isnull, result, tcategory, outfuncoid, false);
1683         }
1684
1685         appendStringInfoChar(result, '}');
1686         ReleaseTupleDesc(tupdesc);
1687 }
1688
1689 /*
1690  * Append JSON text for "val" to "result".
1691  *
1692  * This is just a thin wrapper around datum_to_json.  If the same type will be
1693  * printed many times, avoid using this; better to do the json_categorize_type
1694  * lookups only once.
1695  */
1696 static void
1697 add_json(Datum val, bool is_null, StringInfo result,
1698                  Oid val_type, bool key_scalar)
1699 {
1700         JsonTypeCategory tcategory;
1701         Oid                     outfuncoid;
1702
1703         if (val_type == InvalidOid)
1704                 ereport(ERROR,
1705                                 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
1706                                  errmsg("could not determine input data type")));
1707
1708         if (is_null)
1709         {
1710                 tcategory = JSONTYPE_NULL;
1711                 outfuncoid = InvalidOid;
1712         }
1713         else
1714                 json_categorize_type(val_type,
1715                                                          &tcategory, &outfuncoid);
1716
1717         datum_to_json(val, is_null, result, tcategory, outfuncoid, key_scalar);
1718 }
1719
1720 /*
1721  * SQL function array_to_json(array)
1722  */
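/*
 * Illustrative usage (non-pretty output, so no extra whitespace is emitted):
 *     SELECT array_to_json('{{1,5},{99,100}}'::int[]);
 *       -> [[1,5],[99,100]]
 */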
1723 extern Datum
1724 array_to_json(PG_FUNCTION_ARGS)
1725 {
1726         Datum           array = PG_GETARG_DATUM(0);
1727         StringInfo      result;
1728
1729         result = makeStringInfo();
1730
1731         array_to_json_internal(array, result, false);
1732
1733         PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
1734 }
1735
1736 /*
1737  * SQL function array_to_json(array, prettybool)
1738  */
1739 extern Datum
1740 array_to_json_pretty(PG_FUNCTION_ARGS)
1741 {
1742         Datum           array = PG_GETARG_DATUM(0);
1743         bool            use_line_feeds = PG_GETARG_BOOL(1);
1744         StringInfo      result;
1745
1746         result = makeStringInfo();
1747
1748         array_to_json_internal(array, result, use_line_feeds);
1749
1750         PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
1751 }
1752
1753 /*
1754  * SQL function row_to_json(row)
1755  */
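/*
 * Illustrative usage:
 *     SELECT row_to_json(row(1, 'foo'));
 *       -> {"f1":1,"f2":"foo"}
 */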
1756 extern Datum
1757 row_to_json(PG_FUNCTION_ARGS)
1758 {
1759         Datum           array = PG_GETARG_DATUM(0);
1760         StringInfo      result;
1761
1762         result = makeStringInfo();
1763
1764         composite_to_json(array, result, false);
1765
1766         PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
1767 }
1768
1769 /*
1770  * SQL function row_to_json(row, prettybool)
1771  */
1772 extern Datum
1773 row_to_json_pretty(PG_FUNCTION_ARGS)
1774 {
1775         Datum           array = PG_GETARG_DATUM(0);
1776         bool            use_line_feeds = PG_GETARG_BOOL(1);
1777         StringInfo      result;
1778
1779         result = makeStringInfo();
1780
1781         composite_to_json(array, result, use_line_feeds);
1782
1783         PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
1784 }
1785
1786 /*
1787  * SQL function to_json(anyvalue)
1788  */
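/*
 * Illustrative usage:
 *     SELECT to_json('Fred said "Hi."'::text);
 *       -> "Fred said \"Hi.\""
 */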
1789 Datum
1790 to_json(PG_FUNCTION_ARGS)
1791 {
1792         Datum           val = PG_GETARG_DATUM(0);
1793         Oid                     val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
1794         StringInfo      result;
1795         JsonTypeCategory tcategory;
1796         Oid                     outfuncoid;
1797
1798         if (val_type == InvalidOid)
1799                 ereport(ERROR,
1800                                 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
1801                                  errmsg("could not determine input data type")));
1802
1803         json_categorize_type(val_type,
1804                                                  &tcategory, &outfuncoid);
1805
1806         result = makeStringInfo();
1807
1808         datum_to_json(val, false, result, tcategory, outfuncoid, false);
1809
1810         PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
1811 }
1812
1813 /*
1814  * json_agg transition function
1815  *
1816  * aggregate input column as a json array value.
1817  */
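/*
 * Illustrative usage of the aggregate as a whole:
 *     SELECT json_agg(x) FROM (VALUES (1), (2), (3)) AS t(x);
 *       -> [1, 2, 3]
 */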
1818 Datum
1819 json_agg_transfn(PG_FUNCTION_ARGS)
1820 {
1821         Oid                     val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
1822         MemoryContext aggcontext,
1823                                 oldcontext;
1824         StringInfo      state;
1825         Datum           val;
1826         JsonTypeCategory tcategory;
1827         Oid                     outfuncoid;
1828
1829         if (val_type == InvalidOid)
1830                 ereport(ERROR,
1831                                 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
1832                                  errmsg("could not determine input data type")));
1833
1834         if (!AggCheckCallContext(fcinfo, &aggcontext))
1835         {
1836                 /* cannot be called directly because of internal-type argument */
1837                 elog(ERROR, "json_agg_transfn called in non-aggregate context");
1838         }
1839
1840         if (PG_ARGISNULL(0))
1841         {
1842                 /*
1843                  * Make this StringInfo in a context where it will persist for the
1844                  * duration of the aggregate call.  MemoryContextSwitchTo is only
1845                  * needed the first time, as the StringInfo routines make sure they
1846                  * use the right context to enlarge the object if necessary.
1847                  */
1848                 oldcontext = MemoryContextSwitchTo(aggcontext);
1849                 state = makeStringInfo();
1850                 MemoryContextSwitchTo(oldcontext);
1851
1852                 appendStringInfoChar(state, '[');
1853         }
1854         else
1855         {
1856                 state = (StringInfo) PG_GETARG_POINTER(0);
1857                 appendStringInfoString(state, ", ");
1858         }
1859
1860         /* fast path for NULLs */
1861         if (PG_ARGISNULL(1))
1862         {
1863                 datum_to_json((Datum) 0, true, state, JSONTYPE_NULL, InvalidOid, false);
1864                 PG_RETURN_POINTER(state);
1865         }
1866
1867         val = PG_GETARG_DATUM(1);
1868
1869         /* XXX we do this every time?? */
1870         json_categorize_type(val_type,
1871                                                  &tcategory, &outfuncoid);
1872
1873         /* add some whitespace if structured type and not first item */
1874         if (!PG_ARGISNULL(0) &&
1875                 (tcategory == JSONTYPE_ARRAY || tcategory == JSONTYPE_COMPOSITE))
1876         {
1877                 appendStringInfoString(state, "\n ");
1878         }
1879
1880         datum_to_json(val, false, state, tcategory, outfuncoid, false);
1881
1882         /*
1883          * The transition type for json_agg() is declared to be "internal", which
1884          * is a pass-by-value type the same size as a pointer.  So we can safely
1885          * pass the StringInfo pointer through nodeAgg.c's machinations.
1886          */
1887         PG_RETURN_POINTER(state);
1888 }
1889
1890 /*
1891  * json_agg final function
1892  */
1893 Datum
1894 json_agg_finalfn(PG_FUNCTION_ARGS)
1895 {
1896         StringInfo      state;
1897
1898         /* cannot be called directly because of internal-type argument */
1899         Assert(AggCheckCallContext(fcinfo, NULL));
1900
1901         state = PG_ARGISNULL(0) ? NULL : (StringInfo) PG_GETARG_POINTER(0);
1902
1903         /* NULL result for no rows in, as is standard with aggregates */
1904         if (state == NULL)
1905                 PG_RETURN_NULL();
1906
1907         /* Else return state with appropriate array terminator added */
1908         PG_RETURN_TEXT_P(catenate_stringinfo_string(state, "]"));
1909 }
1910
1911 /*
1912  * json_object_agg transition function.
1913  *
1914  * aggregate two input columns as a single json object value.
1915  */
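/*
 * Illustrative usage of the aggregate as a whole (the spaces inside the
 * braces come from the "{ " opener below and the " }" added by the final
 * function):
 *     SELECT json_object_agg(k, v) FROM (VALUES ('a', 1), ('b', 2)) AS t(k, v);
 *       -> { "a" : 1, "b" : 2 }
 */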
1916 Datum
1917 json_object_agg_transfn(PG_FUNCTION_ARGS)
1918 {
1919         Oid                     val_type;
1920         MemoryContext aggcontext,
1921                                 oldcontext;
1922         StringInfo      state;
1923         Datum           arg;
1924
1925         if (!AggCheckCallContext(fcinfo, &aggcontext))
1926         {
1927                 /* cannot be called directly because of internal-type argument */
1928                 elog(ERROR, "json_object_agg_transfn called in non-aggregate context");
1929         }
1930
1931         if (PG_ARGISNULL(0))
1932         {
1933                 /*
1934                  * Make the StringInfo in a context where it will persist for the
1935                  * duration of the aggregate call. Switching context is only needed
1936                  * for this initial step, as the StringInfo routines make sure they
1937                  * use the right context to enlarge the object if necessary.
1938                  */
1939                 oldcontext = MemoryContextSwitchTo(aggcontext);
1940                 state = makeStringInfo();
1941                 MemoryContextSwitchTo(oldcontext);
1942
1943                 appendStringInfoString(state, "{ ");
1944         }
1945         else
1946         {
1947                 state = (StringInfo) PG_GETARG_POINTER(0);
1948                 appendStringInfoString(state, ", ");
1949         }
1950
1951         /*
1952          * Note: since json_object_agg() is declared as taking type "any", the
1953          * parser will not do any type conversion on unknown-type literals (that
1954          * is, undecorated strings or NULLs).  Such values will arrive here as
1955          * type UNKNOWN, which fortunately does not matter to us, since
1956          * unknownout() works fine.
1957          */
1958         val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
1959
1960         if (val_type == InvalidOid)
1961                 ereport(ERROR,
1962                                 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
1963                                  errmsg("could not determine data type for argument %d", 1)));
1964
1965         if (PG_ARGISNULL(1))
1966                 ereport(ERROR,
1967                                 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
1968                                  errmsg("field name must not be null")));
1969
1970         arg = PG_GETARG_DATUM(1);
1971
1972         add_json(arg, false, state, val_type, true);
1973
1974         appendStringInfoString(state, " : ");
1975
1976         val_type = get_fn_expr_argtype(fcinfo->flinfo, 2);
1977
1978         if (val_type == InvalidOid)
1979                 ereport(ERROR,
1980                                 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
1981                                  errmsg("could not determine data type for argument %d", 2)));
1982
1983         if (PG_ARGISNULL(2))
1984                 arg = (Datum) 0;
1985         else
1986                 arg = PG_GETARG_DATUM(2);
1987
1988         add_json(arg, PG_ARGISNULL(2), state, val_type, false);
1989
1990         PG_RETURN_POINTER(state);
1991 }
1992
1993 /*
1994  * json_object_agg final function.
1995  */
1996 Datum
1997 json_object_agg_finalfn(PG_FUNCTION_ARGS)
1998 {
1999         StringInfo      state;
2000
2001         /* cannot be called directly because of internal-type argument */
2002         Assert(AggCheckCallContext(fcinfo, NULL));
2003
2004         state = PG_ARGISNULL(0) ? NULL : (StringInfo) PG_GETARG_POINTER(0);
2005
2006         /* NULL result for no rows in, as is standard with aggregates */
2007         if (state == NULL)
2008                 PG_RETURN_NULL();
2009
2010         /* Else return state with appropriate object terminator added */
2011         PG_RETURN_TEXT_P(catenate_stringinfo_string(state, " }"));
2012 }
2013
2014 /*
2015  * Helper function for aggregates: return given StringInfo's contents plus
2016  * specified trailing string, as a text datum.  We need this because aggregate
2017  * final functions are not allowed to modify the aggregate state.
2018  */
2019 static text *
2020 catenate_stringinfo_string(StringInfo buffer, const char *addon)
2021 {
2022         /* custom version of cstring_to_text_with_len */
2023         int                     buflen = buffer->len;
2024         int                     addlen = strlen(addon);
2025         text       *result = (text *) palloc(buflen + addlen + VARHDRSZ);
2026
2027         SET_VARSIZE(result, buflen + addlen + VARHDRSZ);
2028         memcpy(VARDATA(result), buffer->data, buflen);
2029         memcpy(VARDATA(result) + buflen, addon, addlen);
2030
2031         return result;
2032 }
2033
2034 /*
2035  * SQL function json_build_object(variadic "any")
2036  */
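/*
 * Illustrative usage:
 *     SELECT json_build_object('foo', 1, 'bar', 2);
 *       -> {"foo" : 1, "bar" : 2}
 */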
2037 Datum
2038 json_build_object(PG_FUNCTION_ARGS)
2039 {
2040         int                     nargs = PG_NARGS();
2041         int                     i;
2042         Datum           arg;
2043         const char *sep = "";
2044         StringInfo      result;
2045         Oid                     val_type;
2046
2047         if (nargs % 2 != 0)
2048                 ereport(ERROR,
2049                                 (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
2050                                  errmsg("argument list must have even number of elements"),
2051                                  errhint("The arguments of json_build_object() must consist of alternating keys and values.")));
2052
2053         result = makeStringInfo();
2054
2055         appendStringInfoChar(result, '{');
2056
2057         for (i = 0; i < nargs; i += 2)
2058         {
2059                 /*
2060                  * Note: since json_build_object() is declared as taking type "any",
2061                  * the parser will not do any type conversion on unknown-type literals
2062                  * (that is, undecorated strings or NULLs).  Such values will arrive
2063                  * here as type UNKNOWN, which fortunately does not matter to us,
2064                  * since unknownout() works fine.
2065                  */
2066                 appendStringInfoString(result, sep);
2067                 sep = ", ";
2068
2069                 /* process key */
2070                 val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
2071
2072                 if (val_type == InvalidOid)
2073                         ereport(ERROR,
2074                                         (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
2075                                          errmsg("could not determine data type for argument %d",
2076                                                         i + 1)));
2077
2078                 if (PG_ARGISNULL(i))
2079                         ereport(ERROR,
2080                                         (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
2081                                          errmsg("argument %d cannot be null", i + 1),
2082                                          errhint("Object keys should be text.")));
2083
2084                 arg = PG_GETARG_DATUM(i);
2085
2086                 add_json(arg, false, result, val_type, true);
2087
2088                 appendStringInfoString(result, " : ");
2089
2090                 /* process value */
2091                 val_type = get_fn_expr_argtype(fcinfo->flinfo, i + 1);
2092
2093                 if (val_type == InvalidOid)
2094                         ereport(ERROR,
2095                                         (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
2096                                          errmsg("could not determine data type for argument %d",
2097                                                         i + 2)));
2098
2099                 if (PG_ARGISNULL(i + 1))
2100                         arg = (Datum) 0;
2101                 else
2102                         arg = PG_GETARG_DATUM(i + 1);
2103
2104                 add_json(arg, PG_ARGISNULL(i + 1), result, val_type, false);
2105         }
2106
2107         appendStringInfoChar(result, '}');
2108
2109         PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
2110 }
2111
2112 /*
2113  * degenerate case of json_build_object where it gets 0 arguments.
2114  */
2115 Datum
2116 json_build_object_noargs(PG_FUNCTION_ARGS)
2117 {
2118         PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2));
2119 }
2120
2121 /*
2122  * SQL function json_build_array(variadic "any")
2123  */
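/*
 * Illustrative usage:
 *     SELECT json_build_array(1, 2, '3', 4, 5);
 *       -> [1, 2, "3", 4, 5]
 */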
2124 Datum
2125 json_build_array(PG_FUNCTION_ARGS)
2126 {
2127         int                     nargs = PG_NARGS();
2128         int                     i;
2129         Datum           arg;
2130         const char *sep = "";
2131         StringInfo      result;
2132         Oid                     val_type;
2133
2134         result = makeStringInfo();
2135
2136         appendStringInfoChar(result, '[');
2137
2138         for (i = 0; i < nargs; i++)
2139         {
2140                 /*
2141                  * Note: since json_build_array() is declared as taking type "any",
2142                  * the parser will not do any type conversion on unknown-type literals
2143                  * (that is, undecorated strings or NULLs).  Such values will arrive
2144                  * here as type UNKNOWN, which fortunately does not matter to us,
2145                  * since unknownout() works fine.
2146                  */
2147                 appendStringInfoString(result, sep);
2148                 sep = ", ";
2149
2150                 val_type = get_fn_expr_argtype(fcinfo->flinfo, i);
2151
2152                 if (val_type == InvalidOid)
2153                         ereport(ERROR,
2154                                         (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
2155                                          errmsg("could not determine data type for argument %d",
2156                                                         i + 1)));
2157
2158                 if (PG_ARGISNULL(i))
2159                         arg = (Datum) 0;
2160                 else
2161                         arg = PG_GETARG_DATUM(i);
2162
2163                 add_json(arg, PG_ARGISNULL(i), result, val_type, false);
2164         }
2165
2166         appendStringInfoChar(result, ']');
2167
2168         PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
2169 }
2170
2171 /*
2172  * degenerate case of json_build_array where it gets 0 arguments.
2173  */
2174 Datum
2175 json_build_array_noargs(PG_FUNCTION_ARGS)
2176 {
2177         PG_RETURN_TEXT_P(cstring_to_text_with_len("[]", 2));
2178 }
2179
2180 /*
2181  * SQL function json_object(text[])
2182  *
2183  * take a one- or two-dimensional array of text as key/value pairs
2184  * for a json object.
2185  */
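/*
 * Illustrative usage; the one- and two-dimensional forms produce the same
 * result, and all values come out as JSON strings:
 *     SELECT json_object('{a, 1, b, "def", c, 3.5}');
 *     SELECT json_object('{{a, 1}, {b, "def"}, {c, 3.5}}');
 *       -> {"a" : "1", "b" : "def", "c" : "3.5"}
 */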
2186 Datum
2187 json_object(PG_FUNCTION_ARGS)
2188 {
2189         ArrayType  *in_array = PG_GETARG_ARRAYTYPE_P(0);
2190         int                     ndims = ARR_NDIM(in_array);
2191         StringInfoData result;
2192         Datum      *in_datums;
2193         bool       *in_nulls;
2194         int                     in_count,
2195                                 count,
2196                                 i;
2197         text       *rval;
2198         char       *v;
2199
2200         switch (ndims)
2201         {
2202                 case 0:
2203                         PG_RETURN_DATUM(CStringGetTextDatum("{}"));
2204                         break;
2205
2206                 case 1:
2207                         if ((ARR_DIMS(in_array)[0]) % 2)
2208                                 ereport(ERROR,
2209                                                 (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
2210                                                  errmsg("array must have even number of elements")));
2211                         break;
2212
2213                 case 2:
2214                         if ((ARR_DIMS(in_array)[1]) != 2)
2215                                 ereport(ERROR,
2216                                                 (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
2217                                                  errmsg("array must have two columns")));
2218                         break;
2219
2220                 default:
2221                         ereport(ERROR,
2222                                         (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
2223                                          errmsg("wrong number of array subscripts")));
2224         }
2225
2226         deconstruct_array(in_array,
2227                                           TEXTOID, -1, false, 'i',
2228                                           &in_datums, &in_nulls, &in_count);
2229
2230         count = in_count / 2;
2231
2232         initStringInfo(&result);
2233
2234         appendStringInfoChar(&result, '{');
2235
2236         for (i = 0; i < count; ++i)
2237         {
2238                 if (in_nulls[i * 2])
2239                         ereport(ERROR,
2240                                         (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
2241                                          errmsg("null value not allowed for object key")));
2242
2243                 v = TextDatumGetCString(in_datums[i * 2]);
2244                 if (i > 0)
2245                         appendStringInfoString(&result, ", ");
2246                 escape_json(&result, v);
2247                 appendStringInfoString(&result, " : ");
2248                 pfree(v);
2249                 if (in_nulls[i * 2 + 1])
2250                         appendStringInfoString(&result, "null");
2251                 else
2252                 {
2253                         v = TextDatumGetCString(in_datums[i * 2 + 1]);
2254                         escape_json(&result, v);
2255                         pfree(v);
2256                 }
2257         }
2258
2259         appendStringInfoChar(&result, '}');
2260
2261         pfree(in_datums);
2262         pfree(in_nulls);
2263
2264         rval = cstring_to_text_with_len(result.data, result.len);
2265         pfree(result.data);
2266
2267         PG_RETURN_TEXT_P(rval);
2268
2269 }
2270
2271 /*
2272  * SQL function json_object(text[], text[])
2273  *
2274  * take separate key and value arrays of text to construct a json object
2275  * pairwise.
2276  */
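/*
 * Illustrative usage:
 *     SELECT json_object('{a, b}', '{1, 2}');
 *       -> {"a" : "1", "b" : "2"}
 */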
2277 Datum
2278 json_object_two_arg(PG_FUNCTION_ARGS)
2279 {
2280         ArrayType  *key_array = PG_GETARG_ARRAYTYPE_P(0);
2281         ArrayType  *val_array = PG_GETARG_ARRAYTYPE_P(1);
2282         int                     nkdims = ARR_NDIM(key_array);
2283         int                     nvdims = ARR_NDIM(val_array);
2284         StringInfoData result;
2285         Datum      *key_datums,
2286                            *val_datums;
2287         bool       *key_nulls,
2288                            *val_nulls;
2289         int                     key_count,
2290                                 val_count,
2291                                 i;
2292         text       *rval;
2293         char       *v;
2294
2295         if (nkdims > 1 || nkdims != nvdims)
2296                 ereport(ERROR,
2297                                 (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
2298                                  errmsg("wrong number of array subscripts")));
2299
2300         if (nkdims == 0)
2301                 PG_RETURN_DATUM(CStringGetTextDatum("{}"));
2302
2303         deconstruct_array(key_array,
2304                                           TEXTOID, -1, false, 'i',
2305                                           &key_datums, &key_nulls, &key_count);
2306
2307         deconstruct_array(val_array,
2308                                           TEXTOID, -1, false, 'i',
2309                                           &val_datums, &val_nulls, &val_count);
2310
2311         if (key_count != val_count)
2312                 ereport(ERROR,
2313                                 (errcode(ERRCODE_ARRAY_SUBSCRIPT_ERROR),
2314                                  errmsg("mismatched array dimensions")));
2315
2316         initStringInfo(&result);
2317
2318         appendStringInfoChar(&result, '{');
2319
2320         for (i = 0; i < key_count; ++i)
2321         {
2322                 if (key_nulls[i])
2323                         ereport(ERROR,
2324                                         (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
2325                                          errmsg("null value not allowed for object key")));
2326
2327                 v = TextDatumGetCString(key_datums[i]);
2328                 if (i > 0)
2329                         appendStringInfoString(&result, ", ");
2330                 escape_json(&result, v);
2331                 appendStringInfoString(&result, " : ");
2332                 pfree(v);
2333                 if (val_nulls[i])
2334                         appendStringInfoString(&result, "null");
2335                 else
2336                 {
2337                         v = TextDatumGetCString(val_datums[i]);
2338                         escape_json(&result, v);
2339                         pfree(v);
2340                 }
2341         }
2342
2343         appendStringInfoChar(&result, '}');
2344
2345         pfree(key_datums);
2346         pfree(key_nulls);
2347         pfree(val_datums);
2348         pfree(val_nulls);
2349
2350         rval = cstring_to_text_with_len(result.data, result.len);
2351         pfree(result.data);
2352
2353         PG_RETURN_TEXT_P(rval);
2354 }
2355
2356
2357 /*
2358  * Produce a JSON string literal, properly escaping characters in the text.
2359  */
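/*
 * For example, a C string containing
 *     he said "hi"
 * followed by a tab is emitted as the JSON literal
 *     "he said \"hi\"\t"
 * Control characters below U+0020 that have no shorthand escape are written
 * as \uXXXX by the default case below.
 */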
2360 void
2361 escape_json(StringInfo buf, const char *str)
2362 {
2363         const char *p;
2364
2365         appendStringInfoCharMacro(buf, '\"');
2366         for (p = str; *p; p++)
2367         {
2368                 switch (*p)
2369                 {
2370                         case '\b':
2371                                 appendStringInfoString(buf, "\\b");
2372                                 break;
2373                         case '\f':
2374                                 appendStringInfoString(buf, "\\f");
2375                                 break;
2376                         case '\n':
2377                                 appendStringInfoString(buf, "\\n");
2378                                 break;
2379                         case '\r':
2380                                 appendStringInfoString(buf, "\\r");
2381                                 break;
2382                         case '\t':
2383                                 appendStringInfoString(buf, "\\t");
2384                                 break;
2385                         case '"':
2386                                 appendStringInfoString(buf, "\\\"");
2387                                 break;
2388                         case '\\':
2389                                 appendStringInfoString(buf, "\\\\");
2390                                 break;
2391                         default:
2392                                 if ((unsigned char) *p < ' ')
2393                                         appendStringInfo(buf, "\\u%04x", (int) *p);
2394                                 else
2395                                         appendStringInfoCharMacro(buf, *p);
2396                                 break;
2397                 }
2398         }
2399         appendStringInfoCharMacro(buf, '\"');
2400 }
2401
2402 /*
2403  * SQL function json_typeof(json) -> text
2404  *
2405  * Returns the type of the outermost JSON value as TEXT.  Possible types are
2406  * "object", "array", "string", "number", "boolean", and "null".
2407  *
2408  * Performs a single call to json_lex() to get the first token of the supplied
2409  * value.  This initial token uniquely determines the value's type.  As our
2410  * input must already have been validated by json_in() or json_recv(), the
2411  * initial token should never be JSON_TOKEN_OBJECT_END, JSON_TOKEN_ARRAY_END,
2412  * JSON_TOKEN_COLON, JSON_TOKEN_COMMA, or JSON_TOKEN_END.
2413  */
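/*
 * Illustrative usage:
 *     SELECT json_typeof('-123.4');   -- number
 *     SELECT json_typeof('"abc"');    -- string
 *     SELECT json_typeof('null');     -- null
 */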
2414 Datum
2415 json_typeof(PG_FUNCTION_ARGS)
2416 {
2417         text       *json;
2418
2419         JsonLexContext *lex;
2420         JsonTokenType tok;
2421         char       *type;
2422
2423         json = PG_GETARG_TEXT_P(0);
2424         lex = makeJsonLexContext(json, false);
2425
2426         /* Lex exactly one token from the input and check its type. */
2427         json_lex(lex);
2428         tok = lex_peek(lex);
2429         switch (tok)
2430         {
2431                 case JSON_TOKEN_OBJECT_START:
2432                         type = "object";
2433                         break;
2434                 case JSON_TOKEN_ARRAY_START:
2435                         type = "array";
2436                         break;
2437                 case JSON_TOKEN_STRING:
2438                         type = "string";
2439                         break;
2440                 case JSON_TOKEN_NUMBER:
2441                         type = "number";
2442                         break;
2443                 case JSON_TOKEN_TRUE:
2444                 case JSON_TOKEN_FALSE:
2445                         type = "boolean";
2446                         break;
2447                 case JSON_TOKEN_NULL:
2448                         type = "null";
2449                         break;
2450                 default:
2451                         elog(ERROR, "unexpected json token: %d", tok);
2452         }
2453
2454         PG_RETURN_TEXT_P(cstring_to_text(type));
2455 }