mirror of https://github.com/postgres/postgres.git
synced 2025-07-28 23:42:10 +03:00
JSON generation improvements.
This adds the following:

    json_agg(anyrecord) -> json
    to_json(any) -> json
    hstore_to_json(hstore) -> json (also used as a cast)
    hstore_to_json_loose(hstore) -> json

The last provides heuristic treatment of numbers and booleans.

Also, in json generation, if any non-builtin type has a cast to json, that
function is used instead of the type's output function.

Andrew Dunstan, reviewed by Steve Singer.

Catalog version bumped.
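A minimal usage sketch of the functions this commit adds (adapted from the
regression tests below; results are indicative, and key ordering follows
hstore's internal key ordering):

    select to_json('Fred said "Hi."'::text);             -- "Fred said \"Hi.\""
    select hstore_to_json('a=>1, b=>t'::hstore);         -- {"a": "1", "b": "t"}
    select hstore_to_json_loose('a=>1, b=>t'::hstore);   -- {"a": 1, "b": true}
    select json_agg(t) from (values (1, 'x'), (2, 'y')) as t(a, b);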
@@ -1453,3 +1453,39 @@ select count(*) from testhstore where h = 'pos=>98, line=>371, node=>CBA, indexed=>t';
  1
 (1 row)

-- json
select hstore_to_json('"a key" =>1, b => t, c => null, d=> 12345, e => 012345, f=> 1.234, g=> 2.345e+4');
                                         hstore_to_json
-------------------------------------------------------------------------------------------------
 {"b": "t", "c": null, "d": "12345", "e": "012345", "f": "1.234", "g": "2.345e+4", "a key": "1"}
(1 row)

select cast( hstore '"a key" =>1, b => t, c => null, d=> 12345, e => 012345, f=> 1.234, g=> 2.345e+4' as json);
                                              json
-------------------------------------------------------------------------------------------------
 {"b": "t", "c": null, "d": "12345", "e": "012345", "f": "1.234", "g": "2.345e+4", "a key": "1"}
(1 row)

select hstore_to_json_loose('"a key" =>1, b => t, c => null, d=> 12345, e => 012345, f=> 1.234, g=> 2.345e+4');
                                   hstore_to_json_loose
------------------------------------------------------------------------------------------
 {"b": true, "c": null, "d": 12345, "e": "012345", "f": 1.234, "g": 2.345e+4, "a key": 1}
(1 row)

create table test_json_agg (f1 text, f2 hstore);
insert into test_json_agg values ('rec1','"a key" =>1, b => t, c => null, d=> 12345, e => 012345, f=> 1.234, g=> 2.345e+4'),
       ('rec2','"a key" =>2, b => f, c => "null", d=> -12345, e => 012345.6, f=> -1.234, g=> 0.345e-4');
select json_agg(q) from test_json_agg q;
                                                           json_agg
----------------------------------------------------------------------------------------------------------------------------
 [{"f1":"rec1","f2":{"b": "t", "c": null, "d": "12345", "e": "012345", "f": "1.234", "g": "2.345e+4", "a key": "1"}}, +
 {"f1":"rec2","f2":{"b": "f", "c": "null", "d": "-12345", "e": "012345.6", "f": "-1.234", "g": "0.345e-4", "a key": "2"}}]
(1 row)

select json_agg(q) from (select f1, hstore_to_json_loose(f2) as f2 from test_json_agg) q;
                                                        json_agg
----------------------------------------------------------------------------------------------------------------------
 [{"f1":"rec1","f2":{"b": true, "c": null, "d": 12345, "e": "012345", "f": 1.234, "g": 2.345e+4, "a key": 1}}, +
 {"f1":"rec2","f2":{"b": false, "c": "null", "d": -12345, "e": "012345.6", "f": -1.234, "g": 0.345e-4, "a key": 2}}]
(1 row)
@@ -234,6 +234,19 @@ LANGUAGE C IMMUTABLE STRICT;
CREATE CAST (text[] AS hstore)
WITH FUNCTION hstore(text[]);

CREATE FUNCTION hstore_to_json(hstore)
RETURNS json
AS 'MODULE_PATHNAME', 'hstore_to_json'
LANGUAGE C IMMUTABLE STRICT;

CREATE CAST (hstore AS json)
WITH FUNCTION hstore_to_json(hstore);

CREATE FUNCTION hstore_to_json_loose(hstore)
RETURNS json
AS 'MODULE_PATHNAME', 'hstore_to_json_loose'
LANGUAGE C IMMUTABLE STRICT;

CREATE FUNCTION hstore(record)
RETURNS hstore
AS 'MODULE_PATHNAME', 'hstore_from_record'
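A minimal illustration of the effect of this cast (not part of the commit's test
suite; output indicative): because hstore_to_json is registered as the
hstore -> json cast above, to_json() and json_agg() pick it up for hstore
arguments, so hstore values are rendered as JSON objects rather than quoted
strings.

    select to_json(hstore 'a=>1, b=>t');   -- {"a": "1", "b": "t"}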
@@ -8,7 +8,10 @@
#include "access/htup_details.h"
#include "catalog/pg_type.h"
#include "funcapi.h"
#include "lib/stringinfo.h"
#include "libpq/pqformat.h"
#include "utils/builtins.h"
#include "utils/json.h"
#include "utils/lsyscache.h"
#include "utils/typcache.h"
@@ -1209,3 +1212,217 @@ hstore_send(PG_FUNCTION_ARGS)

    PG_RETURN_BYTEA_P(pq_endtypsend(&buf));
}


/*
 * hstore_to_json_loose
 *
 * This is a heuristic conversion to json which treats
 * 't' and 'f' as booleans and strings that look like numbers as numbers,
 * as long as they don't start with a leading zero followed by another digit
 * (think zip codes or phone numbers starting with 0).
 */
PG_FUNCTION_INFO_V1(hstore_to_json_loose);
Datum hstore_to_json_loose(PG_FUNCTION_ARGS);
Datum
hstore_to_json_loose(PG_FUNCTION_ARGS)
{
    HStore     *in = PG_GETARG_HS(0);
    int         buflen,
                i;
    int         count = HS_COUNT(in);
    char       *out,
               *ptr;
    char       *base = STRPTR(in);
    HEntry     *entries = ARRPTR(in);
    bool        is_number;
    StringInfo  src,
                dst;

    if (count == 0)
    {
        out = palloc(1);
        *out = '\0';
        PG_RETURN_TEXT_P(cstring_to_text(out));
    }

    buflen = 3;

    /*
     * Formula adjusted slightly from the logic in hstore_out. We have to take
     * account of our treatment of booleans to be a bit more pessimistic about
     * the length of values.
     */

    for (i = 0; i < count; i++)
    {
        /* include "" and colon-space and comma-space */
        buflen += 6 + 2 * HS_KEYLEN(entries, i);
        /* include "" only if nonnull */
        buflen += 3 + (HS_VALISNULL(entries, i)
                       ? 1
                       : 2 * HS_VALLEN(entries, i));
    }

    out = ptr = palloc(buflen);

    src = makeStringInfo();
    dst = makeStringInfo();

    *ptr++ = '{';

    for (i = 0; i < count; i++)
    {
        resetStringInfo(src);
        resetStringInfo(dst);
        appendBinaryStringInfo(src, HS_KEY(entries, base, i), HS_KEYLEN(entries, i));
        escape_json(dst, src->data);
        strncpy(ptr, dst->data, dst->len);
        ptr += dst->len;
        *ptr++ = ':';
        *ptr++ = ' ';
        resetStringInfo(dst);
        if (HS_VALISNULL(entries, i))
            appendStringInfoString(dst, "null");
        /* guess that values of 't' or 'f' are booleans */
        else if (HS_VALLEN(entries, i) == 1 && *(HS_VAL(entries, base, i)) == 't')
            appendStringInfoString(dst, "true");
        else if (HS_VALLEN(entries, i) == 1 && *(HS_VAL(entries, base, i)) == 'f')
            appendStringInfoString(dst, "false");
        else
        {
            is_number = false;
            resetStringInfo(src);
            appendBinaryStringInfo(src, HS_VAL(entries, base, i), HS_VALLEN(entries, i));

            /*
             * don't treat something with a leading zero followed by another
             * digit as numeric - could be a zip code or similar
             */
            if (src->len > 0 && (src->data[0] != '0' || !isdigit(src->data[1])) &&
                strspn(src->data, "+-0123456789Ee.") == src->len)
            {
                /*
                 * might be a number. See if we can input it as a numeric
                 * value
                 */
                char       *endptr = "junk";

                (void) (strtol(src->data, &endptr, 10) + 1);
                if (*endptr == '\0')
                {
                    /*
                     * strtol man page says this means the whole string is
                     * valid
                     */
                    is_number = true;
                }
                else
                {
                    /* not an int - try a double */
                    (void) (strtod(src->data, &endptr) + 1.0);
                    if (*endptr == '\0')
                        is_number = true;
                }
            }
            if (is_number)
                appendBinaryStringInfo(dst, src->data, src->len);
            else
                escape_json(dst, src->data);
        }
        strncpy(ptr, dst->data, dst->len);
        ptr += dst->len;

        if (i + 1 != count)
        {
            *ptr++ = ',';
            *ptr++ = ' ';
        }
    }
    *ptr++ = '}';
    *ptr = '\0';

    PG_RETURN_TEXT_P(cstring_to_text(out));
}

PG_FUNCTION_INFO_V1(hstore_to_json);
Datum hstore_to_json(PG_FUNCTION_ARGS);
Datum
hstore_to_json(PG_FUNCTION_ARGS)
{
    HStore     *in = PG_GETARG_HS(0);
    int         buflen,
                i;
    int         count = HS_COUNT(in);
    char       *out,
               *ptr;
    char       *base = STRPTR(in);
    HEntry     *entries = ARRPTR(in);
    StringInfo  src,
                dst;

    if (count == 0)
    {
        out = palloc(1);
        *out = '\0';
        PG_RETURN_TEXT_P(cstring_to_text(out));
    }

    buflen = 3;

    /*
     * Formula adjusted slightly from the logic in hstore_out. We have to take
     * account of our treatment of booleans to be a bit more pessimistic about
     * the length of values.
     */

    for (i = 0; i < count; i++)
    {
        /* include "" and colon-space and comma-space */
        buflen += 6 + 2 * HS_KEYLEN(entries, i);
        /* include "" only if nonnull */
        buflen += 3 + (HS_VALISNULL(entries, i)
                       ? 1
                       : 2 * HS_VALLEN(entries, i));
    }

    out = ptr = palloc(buflen);

    src = makeStringInfo();
    dst = makeStringInfo();

    *ptr++ = '{';

    for (i = 0; i < count; i++)
    {
        resetStringInfo(src);
        resetStringInfo(dst);
        appendBinaryStringInfo(src, HS_KEY(entries, base, i), HS_KEYLEN(entries, i));
        escape_json(dst, src->data);
        strncpy(ptr, dst->data, dst->len);
        ptr += dst->len;
        *ptr++ = ':';
        *ptr++ = ' ';
        resetStringInfo(dst);
        if (HS_VALISNULL(entries, i))
            appendStringInfoString(dst, "null");
        else
        {
            resetStringInfo(src);
            appendBinaryStringInfo(src, HS_VAL(entries, base, i), HS_VALLEN(entries, i));
            escape_json(dst, src->data);
        }
        strncpy(ptr, dst->data, dst->len);
        ptr += dst->len;

        if (i + 1 != count)
        {
            *ptr++ = ',';
            *ptr++ = ' ';
        }
    }
    *ptr++ = '}';
    *ptr = '\0';

    PG_RETURN_TEXT_P(cstring_to_text(out));
}
@@ -330,3 +330,15 @@ set enable_seqscan=off;
select count(*) from testhstore where h #># 'p=>1';
select count(*) from testhstore where h = 'pos=>98, line=>371, node=>CBA, indexed=>t';

-- json
select hstore_to_json('"a key" =>1, b => t, c => null, d=> 12345, e => 012345, f=> 1.234, g=> 2.345e+4');
select cast( hstore '"a key" =>1, b => t, c => null, d=> 12345, e => 012345, f=> 1.234, g=> 2.345e+4' as json);
select hstore_to_json_loose('"a key" =>1, b => t, c => null, d=> 12345, e => 012345, f=> 1.234, g=> 2.345e+4');

create table test_json_agg (f1 text, f2 hstore);
insert into test_json_agg values ('rec1','"a key" =>1, b => t, c => null, d=> 12345, e => 012345, f=> 1.234, g=> 2.345e+4'),
       ('rec2','"a key" =>2, b => f, c => "null", d=> -12345, e => 012345.6, f=> -1.234, g=> 0.345e-4');
select json_agg(q) from test_json_agg q;
select json_agg(q) from (select f1, hstore_to_json_loose(f2) as f2 from test_json_agg) q;
@@ -9685,10 +9685,41 @@ table2-mapping
       <entry><literal>row_to_json(row(1,'foo'))</literal></entry>
       <entry><literal>{"f1":1,"f2":"foo"}</literal></entry>
      </row>
      <row>
       <entry>
        <indexterm>
         <primary>to_json</primary>
        </indexterm>
        <literal>to_json(anyelement)</literal>
       </entry>
       <entry>
         Returns the value as JSON. If the data type is not builtin, and there
         is a cast from the type to json, the cast function will be used to
         perform the conversion. Otherwise, for any value other than a number,
         a boolean or NULL, the text representation will be used, escaped and
         quoted so that it is legal JSON.
       </entry>
       <entry><literal>to_json('Fred said "Hi."')</literal></entry>
       <entry><literal>"Fred said \"Hi.\""</literal></entry>
      </row>
     </tbody>
    </tgroup>
   </table>

  <note>
   <para>
    The <xref linkend="hstore"> extension has a cast from hstore to
    json, so that converted hstore values are represented as json objects,
    not as string values.
   </para>
  </note>

  <para>
   See also <xref linkend="functions-aggregate"> about the aggregate
   function <function>json_agg</function> which aggregates record
   values as json efficiently.
  </para>

 </sect1>

 <sect1 id="functions-sequence">
@@ -11059,6 +11090,22 @@ SELECT NULLIF(value, '(none)') ...
       <entry>equivalent to <function>bool_and</function></entry>
     </row>

     <row>
      <entry>
       <indexterm>
        <primary>json_agg</primary>
       </indexterm>
       <function>json_agg(<replaceable class="parameter">record</replaceable>)</function>
      </entry>
      <entry>
       <type>record</type>
      </entry>
      <entry>
       <type>json</type>
      </entry>
      <entry>aggregates records as a json array of objects</entry>
     </row>

     <row>
      <entry>
       <indexterm>
@@ -11204,6 +11251,7 @@ SELECT count(*) FROM sometable;

  <para>
   The aggregate functions <function>array_agg</function>,
   <function>json_agg</function>,
   <function>string_agg</function>,
   and <function>xmlagg</function>, as well as similar user-defined
   aggregate functions, produce meaningfully different result values
@@ -322,6 +322,22 @@ b
       <entry><literal>{{a,1},{b,2}}</literal></entry>
      </row>

      <row>
       <entry><function>hstore_to_json(hstore)</function></entry>
       <entry><type>json</type></entry>
       <entry>get <type>hstore</type> as a json value</entry>
       <entry><literal>hstore_to_json('"a key"=>1, b=>t, c=>null, d=>12345, e=>012345, f=>1.234, g=>2.345e+4')</literal></entry>
       <entry><literal>{"a key": "1", "b": "t", "c": null, "d": "12345", "e": "012345", "f": "1.234", "g": "2.345e+4"}</literal></entry>
      </row>

      <row>
       <entry><function>hstore_to_json_loose(hstore)</function></entry>
       <entry><type>json</type></entry>
       <entry>get <type>hstore</type> as a json value, but attempting to distinguish numerical and boolean values so they are unquoted in the json</entry>
       <entry><literal>hstore_to_json_loose('"a key"=>1, b=>t, c=>null, d=>12345, e=>012345, f=>1.234, g=>2.345e+4')</literal></entry>
       <entry><literal>{"a key": 1, "b": true, "c": null, "d": 12345, "e": "012345", "f": 1.234, "g": 2.345e+4}</literal></entry>
      </row>

      <row>
       <entry><function>slice(hstore, text[])</function></entry>
       <entry><type>hstore</type></entry>
@@ -396,6 +412,13 @@ b
  </tgroup>
 </table>

 <note>
  <para>
   The function <function>hstore_to_json</function> is used when an <type>hstore</type>
   value is cast to <type>json</type>.
  </para>
 </note>

 <note>
  <para>
   The function <function>populate_record</function> is actually declared
@@ -14,6 +14,8 @@
#include "postgres.h"

#include "access/htup_details.h"
#include "access/transam.h"
#include "catalog/pg_cast.h"
#include "catalog/pg_type.h"
#include "executor/spi.h"
#include "lib/stringinfo.h"
@@ -24,6 +26,7 @@
#include "utils/builtins.h"
#include "utils/lsyscache.h"
#include "utils/json.h"
#include "utils/syscache.h"
#include "utils/typcache.h"

typedef enum                    /* types of JSON values */
@@ -42,7 +45,7 @@ typedef struct /* state of JSON lexer */
{
    char       *input;              /* whole string being parsed */
    char       *token_start;        /* start of current token within input */
    char       *token_terminator;   /* end of previous or current token */
    JsonValueType token_type;       /* type of current token, once it's known */
} JsonLexContext;
@@ -67,7 +70,7 @@ typedef enum /* required operations on state stack */
{
    JSON_STACKOP_NONE,          /* no-op */
    JSON_STACKOP_PUSH,          /* push new JSON_PARSE_VALUE stack item */
    JSON_STACKOP_PUSH_WITH_PUSHBACK,    /* push, then rescan current token */
    JSON_STACKOP_POP            /* pop, or expect end of input if no stack */
} JsonStackOp;
@@ -77,19 +80,25 @@ static void json_lex_string(JsonLexContext *lex);
static void json_lex_number(JsonLexContext *lex, char *s);
static void report_parse_error(JsonParseStack *stack, JsonLexContext *lex);
static void report_invalid_token(JsonLexContext *lex);
static int report_json_context(JsonLexContext *lex);
static char *extract_mb_char(char *s);
static void composite_to_json(Datum composite, StringInfo result,
                  bool use_line_feeds);
static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
                  Datum *vals, bool *nulls, int *valcount,
                  TYPCATEGORY tcategory, Oid typoutputfunc,
                  bool use_line_feeds);
static void array_to_json_internal(Datum array, StringInfo result,
                  bool use_line_feeds);

/*
 * All the defined type categories are upper case, so use lower case here
 * so we avoid any possible clash.
 */
/* fake type category for JSON so we can distinguish it in datum_to_json */
#define TYPCATEGORY_JSON 'j'
/* fake category for types that have a cast to json */
#define TYPCATEGORY_JSON_CAST 'c'
/* letters appearing in numeric output that aren't valid in a JSON number */
#define NON_NUMERIC_LETTER "NnAaIiFfTtYy"
/* chars to consider as part of an alphanumeric token */
@@ -384,15 +393,15 @@ json_lex(JsonLexContext *lex)
         * unintuitive prefix thereof.
         */
        for (p = s; JSON_ALPHANUMERIC_CHAR(*p); p++)
             /* skip */ ;

        if (p == s)
        {
            /*
             * We got some sort of unexpected punctuation or an otherwise
             * unexpected character, so just complain about that one
-            * character. (It can't be multibyte because the above loop
-            * will advance over any multibyte characters.)
             * character. (It can't be multibyte because the above loop will
             * advance over any multibyte characters.)
             */
            lex->token_terminator = s + 1;
            report_invalid_token(lex);
@@ -585,7 +594,7 @@ json_lex_number(JsonLexContext *lex, char *s)
    }

    /*
     * Check for trailing garbage. As in json_lex(), any alphanumeric stuff
     * here should be considered part of the token for error-reporting
     * purposes.
     */
@@ -653,16 +662,16 @@ report_parse_error(JsonParseStack *stack, JsonLexContext *lex)
            ereport(ERROR,
                    (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
                     errmsg("invalid input syntax for type json"),
                     errdetail("Expected \",\" or \"]\", but found \"%s\".",
                               token),
                     report_json_context(lex)));
            break;
        case JSON_PARSE_OBJECT_START:
            ereport(ERROR,
                    (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
                     errmsg("invalid input syntax for type json"),
                     errdetail("Expected string or \"}\", but found \"%s\".",
                               token),
                     report_json_context(lex)));
            break;
        case JSON_PARSE_OBJECT_LABEL:
@@ -677,8 +686,8 @@ report_parse_error(JsonParseStack *stack, JsonLexContext *lex)
            ereport(ERROR,
                    (errcode(ERRCODE_INVALID_TEXT_REPRESENTATION),
                     errmsg("invalid input syntax for type json"),
                     errdetail("Expected \",\" or \"}\", but found \"%s\".",
                               token),
                     report_json_context(lex)));
            break;
        case JSON_PARSE_OBJECT_COMMA:
@@ -820,6 +829,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
              TYPCATEGORY tcategory, Oid typoutputfunc)
{
    char       *outputstr;
    text       *jsontext;

    if (is_null)
    {
@@ -862,6 +872,13 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
            appendStringInfoString(result, outputstr);
            pfree(outputstr);
            break;
        case TYPCATEGORY_JSON_CAST:
            jsontext = DatumGetTextP(OidFunctionCall1(typoutputfunc, val));
            outputstr = text_to_cstring(jsontext);
            appendStringInfoString(result, outputstr);
            pfree(outputstr);
            pfree(jsontext);
            break;
        default:
            outputstr = OidOutputFunctionCall(typoutputfunc, val);
            escape_json(result, outputstr);
@@ -935,6 +952,7 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
    Oid         typioparam;
    Oid         typoutputfunc;
    TYPCATEGORY tcategory;
    Oid         castfunc = InvalidOid;

    ndim = ARR_NDIM(v);
    dim = ARR_DIMS(v);
@@ -950,11 +968,32 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
                     &typlen, &typbyval, &typalign,
                     &typdelim, &typioparam, &typoutputfunc);

    if (element_type > FirstNormalObjectId)
    {
        HeapTuple   tuple;
        Form_pg_cast castForm;

        tuple = SearchSysCache2(CASTSOURCETARGET,
                                ObjectIdGetDatum(element_type),
                                ObjectIdGetDatum(JSONOID));
        if (HeapTupleIsValid(tuple))
        {
            castForm = (Form_pg_cast) GETSTRUCT(tuple);

            if (castForm->castmethod == COERCION_METHOD_FUNCTION)
                castfunc = typoutputfunc = castForm->castfunc;

            ReleaseSysCache(tuple);
        }
    }

    deconstruct_array(v, element_type, typlen, typbyval,
                      typalign, &elements, &nulls,
                      &nitems);

-   if (element_type == RECORDOID)
    if (castfunc != InvalidOid)
        tcategory = TYPCATEGORY_JSON_CAST;
    else if (element_type == RECORDOID)
        tcategory = TYPCATEGORY_COMPOSITE;
    else if (element_type == JSONOID)
        tcategory = TYPCATEGORY_JSON;
@@ -1009,6 +1048,7 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
        TYPCATEGORY tcategory;
        Oid         typoutput;
        bool        typisvarlena;
        Oid         castfunc = InvalidOid;

        if (tupdesc->attrs[i]->attisdropped)
            continue;
@@ -1023,7 +1063,31 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)

        origval = heap_getattr(tuple, i + 1, tupdesc, &isnull);

-       if (tupdesc->attrs[i]->atttypid == RECORDARRAYOID)
        getTypeOutputInfo(tupdesc->attrs[i]->atttypid,
                          &typoutput, &typisvarlena);

        if (tupdesc->attrs[i]->atttypid > FirstNormalObjectId)
        {
            HeapTuple   cast_tuple;
            Form_pg_cast castForm;

            cast_tuple = SearchSysCache2(CASTSOURCETARGET,
                                         ObjectIdGetDatum(tupdesc->attrs[i]->atttypid),
                                         ObjectIdGetDatum(JSONOID));
            if (HeapTupleIsValid(cast_tuple))
            {
                castForm = (Form_pg_cast) GETSTRUCT(cast_tuple);

                if (castForm->castmethod == COERCION_METHOD_FUNCTION)
                    castfunc = typoutput = castForm->castfunc;

                ReleaseSysCache(cast_tuple);
            }
        }

        if (castfunc != InvalidOid)
            tcategory = TYPCATEGORY_JSON_CAST;
        else if (tupdesc->attrs[i]->atttypid == RECORDARRAYOID)
            tcategory = TYPCATEGORY_ARRAY;
        else if (tupdesc->attrs[i]->atttypid == RECORDOID)
            tcategory = TYPCATEGORY_COMPOSITE;
@@ -1032,9 +1096,6 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
        else
            tcategory = TypeCategory(tupdesc->attrs[i]->atttypid);

-       getTypeOutputInfo(tupdesc->attrs[i]->atttypid,
-                         &typoutput, &typisvarlena);
-
        /*
         * If we have a toasted datum, forcibly detoast it here to avoid
         * memory leakage inside the type's output routine.
@@ -1121,6 +1182,222 @@ row_to_json_pretty(PG_FUNCTION_ARGS)
    PG_RETURN_TEXT_P(cstring_to_text(result->data));
}

/*
 * SQL function to_json(anyvalue)
 */
Datum
to_json(PG_FUNCTION_ARGS)
{
    Oid         val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
    StringInfo  result;
    Datum       orig_val,
                val;
    TYPCATEGORY tcategory;
    Oid         typoutput;
    bool        typisvarlena;
    Oid         castfunc = InvalidOid;

    if (val_type == InvalidOid)
        ereport(ERROR,
                (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
                 errmsg("could not determine input data type")));

    result = makeStringInfo();

    orig_val = PG_ARGISNULL(0) ? (Datum) 0 : PG_GETARG_DATUM(0);

    getTypeOutputInfo(val_type, &typoutput, &typisvarlena);

    if (val_type > FirstNormalObjectId)
    {
        HeapTuple   tuple;
        Form_pg_cast castForm;

        tuple = SearchSysCache2(CASTSOURCETARGET,
                                ObjectIdGetDatum(val_type),
                                ObjectIdGetDatum(JSONOID));
        if (HeapTupleIsValid(tuple))
        {
            castForm = (Form_pg_cast) GETSTRUCT(tuple);

            if (castForm->castmethod == COERCION_METHOD_FUNCTION)
                castfunc = typoutput = castForm->castfunc;

            ReleaseSysCache(tuple);
        }
    }

    if (castfunc != InvalidOid)
        tcategory = TYPCATEGORY_JSON_CAST;
    else if (val_type == RECORDARRAYOID)
        tcategory = TYPCATEGORY_ARRAY;
    else if (val_type == RECORDOID)
        tcategory = TYPCATEGORY_COMPOSITE;
    else if (val_type == JSONOID)
        tcategory = TYPCATEGORY_JSON;
    else
        tcategory = TypeCategory(val_type);

    /*
     * If we have a toasted datum, forcibly detoast it here to avoid memory
     * leakage inside the type's output routine.
     */
    if (typisvarlena && orig_val != (Datum) 0)
        val = PointerGetDatum(PG_DETOAST_DATUM(orig_val));
    else
        val = orig_val;

    datum_to_json(val, false, result, tcategory, typoutput);

    /* Clean up detoasted copy, if any */
    if (val != orig_val)
        pfree(DatumGetPointer(val));

    PG_RETURN_TEXT_P(cstring_to_text(result->data));
}

/*
 * json_agg transition function
 */
Datum
json_agg_transfn(PG_FUNCTION_ARGS)
{
    Oid         val_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
    MemoryContext aggcontext,
                oldcontext;
    StringInfo  state;
    Datum       orig_val,
                val;
    TYPCATEGORY tcategory;
    Oid         typoutput;
    bool        typisvarlena;
    Oid         castfunc = InvalidOid;

    if (val_type == InvalidOid)
        ereport(ERROR,
                (errcode(ERRCODE_INVALID_PARAMETER_VALUE),
                 errmsg("could not determine input data type")));

    if (!AggCheckCallContext(fcinfo, &aggcontext))
    {
        /* cannot be called directly because of internal-type argument */
        elog(ERROR, "json_agg_transfn called in non-aggregate context");
    }

    if (PG_ARGISNULL(0))
    {
        /*
         * Make this StringInfo in a context where it will persist for the
         * duration of the aggregate call. It's only needed for this initial
         * piece, as the StringInfo routines make sure they use the right
         * context to enlarge the object if necessary.
         */
        oldcontext = MemoryContextSwitchTo(aggcontext);
        state = makeStringInfo();
        MemoryContextSwitchTo(oldcontext);

        appendStringInfoChar(state, '[');
    }
    else
    {
        state = (StringInfo) PG_GETARG_POINTER(0);
        appendStringInfoString(state, ", ");
    }

    /* fast path for NULLs */
    if (PG_ARGISNULL(1))
    {
        orig_val = (Datum) 0;
        datum_to_json(orig_val, true, state, 0, InvalidOid);
        PG_RETURN_POINTER(state);
    }

    orig_val = PG_GETARG_DATUM(1);

    getTypeOutputInfo(val_type, &typoutput, &typisvarlena);

    if (val_type > FirstNormalObjectId)
    {
        HeapTuple   tuple;
        Form_pg_cast castForm;

        tuple = SearchSysCache2(CASTSOURCETARGET,
                                ObjectIdGetDatum(val_type),
                                ObjectIdGetDatum(JSONOID));
        if (HeapTupleIsValid(tuple))
        {
            castForm = (Form_pg_cast) GETSTRUCT(tuple);

            if (castForm->castmethod == COERCION_METHOD_FUNCTION)
                castfunc = typoutput = castForm->castfunc;

            ReleaseSysCache(tuple);
        }
    }

    if (castfunc != InvalidOid)
        tcategory = TYPCATEGORY_JSON_CAST;
    else if (val_type == RECORDARRAYOID)
        tcategory = TYPCATEGORY_ARRAY;
    else if (val_type == RECORDOID)
        tcategory = TYPCATEGORY_COMPOSITE;
    else if (val_type == JSONOID)
        tcategory = TYPCATEGORY_JSON;
    else
        tcategory = TypeCategory(val_type);

    /*
     * If we have a toasted datum, forcibly detoast it here to avoid memory
     * leakage inside the type's output routine.
     */
    if (typisvarlena)
        val = PointerGetDatum(PG_DETOAST_DATUM(orig_val));
    else
        val = orig_val;

    if (!PG_ARGISNULL(0) &&
        (tcategory == TYPCATEGORY_ARRAY || tcategory == TYPCATEGORY_COMPOSITE))
    {
        appendStringInfoString(state, "\n ");
    }

    datum_to_json(val, false, state, tcategory, typoutput);

    /* Clean up detoasted copy, if any */
    if (val != orig_val)
        pfree(DatumGetPointer(val));

    /*
     * The transition type for array_agg() is declared to be "internal", which
     * is a pass-by-value type the same size as a pointer. So we can safely
     * pass the ArrayBuildState pointer through nodeAgg.c's machinations.
     */
    PG_RETURN_POINTER(state);
}

/*
 * json_agg final function
 */
Datum
json_agg_finalfn(PG_FUNCTION_ARGS)
{
    StringInfo  state;

    /* cannot be called directly because of internal-type argument */
    Assert(AggCheckCallContext(fcinfo, NULL));

    state = PG_ARGISNULL(0) ? NULL : (StringInfo) PG_GETARG_POINTER(0);

    if (state == NULL)
        PG_RETURN_NULL();

    appendStringInfoChar(state, ']');

    PG_RETURN_TEXT_P(cstring_to_text(state->data));
}

/*
 * Produce a JSON string literal, properly escaping characters in the text.
 */
@@ -53,6 +53,6 @@
 */

/* yyyymmddN */
-#define CATALOG_VERSION_NO 201303081
#define CATALOG_VERSION_NO 201303101

#endif
@@ -232,6 +232,9 @@ DATA(insert ( 3538 string_agg_transfn string_agg_finalfn 0 2281 _null_ ));
/* bytea */
DATA(insert ( 3545 bytea_string_agg_transfn bytea_string_agg_finalfn 0 2281 _null_ ));

/* json */
DATA(insert ( 3175 json_agg_transfn json_agg_finalfn 0 2281 _null_ ));

/*
 * prototypes for functions in pg_aggregate.c
 */
@@ -4106,6 +4106,14 @@ DATA(insert OID = 3155 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1
DESCR("map row to json");
DATA(insert OID = 3156 ( row_to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2249 16" _null_ _null_ _null_ _null_ row_to_json_pretty _null_ _null_ _null_ ));
DESCR("map row to json with optional pretty printing");
DATA(insert OID = 3173 ( json_agg_transfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ json_agg_transfn _null_ _null_ _null_ ));
DESCR("json aggregate transition function");
DATA(insert OID = 3174 ( json_agg_finalfn PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 114 "2281" _null_ _null_ _null_ _null_ json_agg_finalfn _null_ _null_ _null_ ));
DESCR("json aggregate final function");
DATA(insert OID = 3175 ( json_agg PGNSP PGUID 12 1 0 0 0 t f f f f f i 1 0 114 "2283" _null_ _null_ _null_ _null_ aggregate_dummy _null_ _null_ _null_ ));
DESCR("aggregate input into json");
DATA(insert OID = 3176 ( to_json PGNSP PGUID 12 1 0 0 0 f f f f t f s 1 0 114 "2283" _null_ _null_ _null_ _null_ to_json _null_ _null_ _null_ ));
DESCR("map input to json");

/* uuid */
DATA(insert OID = 2952 ( uuid_in PGNSP PGUID 12 1 0 0 0 f f f f t f i 1 0 2950 "2275" _null_ _null_ _null_ _null_ uuid_in _null_ _null_ _null_ ));
@@ -25,6 +25,11 @@ extern Datum array_to_json(PG_FUNCTION_ARGS);
extern Datum array_to_json_pretty(PG_FUNCTION_ARGS);
extern Datum row_to_json(PG_FUNCTION_ARGS);
extern Datum row_to_json_pretty(PG_FUNCTION_ARGS);
extern Datum to_json(PG_FUNCTION_ARGS);

extern Datum json_agg_transfn(PG_FUNCTION_ARGS);
extern Datum json_agg_finalfn(PG_FUNCTION_ARGS);

extern void escape_json(StringInfo buf, const char *str);

#endif   /* JSON_H */
@@ -403,6 +403,30 @@ SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
 {"f1":[5,6,7,8,9,10]}
(1 row)

--json_agg
SELECT json_agg(q)
  FROM ( SELECT $$a$$ || x AS b, y AS c,
              ARRAY[ROW(x.*,ARRAY[1,2,3]),
                    ROW(y.*,ARRAY[4,5,6])] AS z
         FROM generate_series(1,2) x,
              generate_series(4,5) y) q;
                                json_agg
-----------------------------------------------------------------------
 [{"b":"a1","c":4,"z":[{"f1":1,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}, +
 {"b":"a1","c":5,"z":[{"f1":1,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}, +
 {"b":"a2","c":4,"z":[{"f1":2,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}, +
 {"b":"a2","c":5,"z":[{"f1":2,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}]
(1 row)

SELECT json_agg(q)
  FROM rows q;
       json_agg
-----------------------
 [{"x":1,"y":"txt1"}, +
 {"x":2,"y":"txt2"}, +
 {"x":3,"y":"txt3"}]
(1 row)

-- non-numeric output
SELECT row_to_json(q)
FROM (SELECT 'NaN'::float8 AS "float8field") q;
@@ -100,6 +100,18 @@ FROM rows q;

SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);

--json_agg

SELECT json_agg(q)
  FROM ( SELECT $$a$$ || x AS b, y AS c,
              ARRAY[ROW(x.*,ARRAY[1,2,3]),
                    ROW(y.*,ARRAY[4,5,6])] AS z
         FROM generate_series(1,2) x,
              generate_series(4,5) y) q;

SELECT json_agg(q)
  FROM rows q;

-- non-numeric output
SELECT row_to_json(q)
FROM (SELECT 'NaN'::float8 AS "float8field") q;