
Revert SQL/JSON features

This reverts the following and makes some associated cleanups:

    commit f79b803dc: Common SQL/JSON clauses
    commit f4fb45d15: SQL/JSON constructors
    commit 5f0adec25: Make STRING an unreserved_keyword.
    commit 33a377608: IS JSON predicate
    commit 1a36bc9db: SQL/JSON query functions
    commit 606948b05: SQL JSON functions
    commit 49082c2cc: RETURNING clause for JSON() and JSON_SCALAR()
    commit 4e34747c8: JSON_TABLE
    commit fadb48b00: PLAN clauses for JSON_TABLE
    commit 2ef6f11b0: Reduce running time of jsonb_sqljson test
    commit 14d3f24fa: Further improve jsonb_sqljson parallel test
    commit a6baa4bad: Documentation for SQL/JSON features
    commit b46bcf7a4: Improve readability of SQL/JSON documentation.
    commit 112fdb352: Fix finalization for json_objectagg and friends
    commit fcdb35c32: Fix transformJsonBehavior
    commit 4cd8717af: Improve a couple of sql/json error messages
    commit f7a605f63: Small cleanups in SQL/JSON code
    commit 9c3d25e17: Fix JSON_OBJECTAGG uniquefying bug
    commit a79153b7a: Claim SQL standard compliance for SQL/JSON features
    commit a1e7616d6: Rework SQL/JSON documentation
    commit 8d9f9634e: Fix errors in copyfuncs/equalfuncs support for JSON node types.
    commit 3c633f32b: Only allow returning string types or bytea from json_serialize
    commit 67b26703b: expression eval: Fix EEOP_JSON_CONSTRUCTOR and EEOP_JSONEXPR size.

The release notes are also adjusted.

Backpatch to release 15.

Discussion: https://postgr.es/m/40d2c882-bcac-19a9-754d-4299e1d87ac7@postgresql.org
Author: Andrew Dunstan
Date:   2022-09-01 17:07:14 -04:00
Parent: 90247e742f
Commit: 2f2b18bd3f

60 changed files with 348 additions and 14893 deletions
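
For context, the statements below are a hand-written sketch of the SQL/JSON syntax that the listed commits added to the v15 grammar and that this revert removes again; they illustrate the feature names above and are not excerpts from the patch itself:

    -- constructors and the IS JSON predicate (f4fb45d15, 33a377608)
    SELECT JSON_OBJECT('a' VALUE 1, 'b' VALUE 2);
    SELECT '[1, 2, 3]' IS JSON ARRAY;

    -- query functions (1a36bc9db)
    SELECT JSON_VALUE('{"a": 1}', '$.a' RETURNING int);
    SELECT JSON_QUERY('{"a": [1, 2]}', '$.a' WITH WRAPPER);

    -- JSON_TABLE (4e34747c8)
    SELECT *
    FROM JSON_TABLE('[{"a": 1}, {"a": 2}]', '$[*]'
                    COLUMNS (a int PATH '$.a'));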

View File

@@ -527,20 +527,20 @@ T652 SQL-dynamic statements in SQL routines NO
T653 SQL-schema statements in external routines YES
T654 SQL-dynamic statements in external routines NO
T655 Cyclically dependent routines YES
T811 Basic SQL/JSON constructor functions YES
T812 SQL/JSON: JSON_OBJECTAGG YES
T813 SQL/JSON: JSON_ARRAYAGG with ORDER BY YES
T814 Colon in JSON_OBJECT or JSON_OBJECTAGG YES
T821 Basic SQL/JSON query operators YES
T822 SQL/JSON: IS JSON WITH UNIQUE KEYS predicate YES
T823 SQL/JSON: PASSING clause YES
T824 JSON_TABLE: specific PLAN clause YES
T825 SQL/JSON: ON EMPTY and ON ERROR clauses YES
T826 General value expression in ON ERROR or ON EMPTY clauses YES
T827 JSON_TABLE: sibling NESTED COLUMNS clauses YES
T828 JSON_QUERY YES
T829 JSON_QUERY: array wrapper options YES
T830 Enforcing unique keys in SQL/JSON constructor functions YES
T811 Basic SQL/JSON constructor functions NO
T812 SQL/JSON: JSON_OBJECTAGG NO
T813 SQL/JSON: JSON_ARRAYAGG with ORDER BY NO
T814 Colon in JSON_OBJECT or JSON_OBJECTAGG NO
T821 Basic SQL/JSON query operators NO
T822 SQL/JSON: IS JSON WITH UNIQUE KEYS predicate NO
T823 SQL/JSON: PASSING clause NO
T824 JSON_TABLE: specific PLAN clause NO
T825 SQL/JSON: ON EMPTY and ON ERROR clauses NO
T826 General value expression in ON ERROR or ON EMPTY clauses NO
T827 JSON_TABLE: sibling NESTED COLUMNS clauses NO
T828 JSON_QUERY NO
T829 JSON_QUERY: array wrapper options NO
T830 Enforcing unique keys in SQL/JSON constructor functions NO
T831 SQL/JSON path language: strict mode YES
T832 SQL/JSON path language: item method YES
T833 SQL/JSON path language: multiple subscripts YES
@@ -548,7 +548,7 @@ T834 SQL/JSON path language: wildcard member accessor YES
T835 SQL/JSON path language: filter expressions YES
T836 SQL/JSON path language: starts with predicate YES
T837 SQL/JSON path language: regex_like predicate YES
T838 JSON_TABLE: PLAN DEFAULT clause YES
T838 JSON_TABLE: PLAN DEFAULT clause NO
T839 Formatted cast of datetimes to/from character strings NO
M001 Datalinks NO
M002 Datalinks via SQL/CLI NO
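
The hunks above downgrade the conformance claims for the SQL/JSON feature IDs (T811 through T830, plus T838) from YES to NO. Assuming, as the feature IDs suggest, that the edited file is the SQL-conformance feature list that backs the information_schema.sql_features view, the downgraded entries can be inspected after the revert with a query along these lines (a sketch, not part of the commit):

    SELECT feature_id, feature_name, is_supported
    FROM information_schema.sql_features
    WHERE feature_id BETWEEN 'T811' AND 'T838';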

View File

@@ -3851,13 +3851,7 @@ ExplainTargetRel(Plan *plan, Index rti, ExplainState *es)
break;
case T_TableFuncScan:
Assert(rte->rtekind == RTE_TABLEFUNC);
if (rte->tablefunc)
if (rte->tablefunc->functype == TFT_XMLTABLE)
objectname = "xmltable";
else /* Must be TFT_JSON_TABLE */
objectname = "json_table";
else
objectname = NULL;
objectname = "xmltable";
objecttag = "Table Function Name";
break;
case T_ValuesScan:

View File

@@ -47,9 +47,6 @@
#include "utils/array.h"
#include "utils/builtins.h"
#include "utils/datum.h"
#include "utils/json.h"
#include "utils/jsonb.h"
#include "utils/jsonpath.h"
#include "utils/lsyscache.h"
#include "utils/typcache.h"
@@ -88,40 +85,6 @@ static void ExecBuildAggTransCall(ExprState *state, AggState *aggstate,
bool nullcheck);
static ExprState *
ExecInitExprInternal(Expr *node, PlanState *parent, ParamListInfo ext_params,
Datum *caseval, bool *casenull)
{
ExprState *state;
ExprEvalStep scratch = {0};
/* Special case: NULL expression produces a NULL ExprState pointer */
if (node == NULL)
return NULL;
/* Initialize ExprState with empty step list */
state = makeNode(ExprState);
state->expr = node;
state->parent = parent;
state->ext_params = ext_params;
state->innermost_caseval = caseval;
state->innermost_casenull = casenull;
/* Insert EEOP_*_FETCHSOME steps as needed */
ExecInitExprSlots(state, (Node *) node);
/* Compile the expression proper */
ExecInitExprRec(node, state, &state->resvalue, &state->resnull);
/* Finally, append a DONE step */
scratch.opcode = EEOP_DONE;
ExprEvalPushStep(state, &scratch);
ExecReadyExpr(state);
return state;
}
/*
* ExecInitExpr: prepare an expression tree for execution
*
@@ -159,7 +122,32 @@ ExecInitExprInternal(Expr *node, PlanState *parent, ParamListInfo ext_params,
ExprState *
ExecInitExpr(Expr *node, PlanState *parent)
{
return ExecInitExprInternal(node, parent, NULL, NULL, NULL);
ExprState *state;
ExprEvalStep scratch = {0};
/* Special case: NULL expression produces a NULL ExprState pointer */
if (node == NULL)
return NULL;
/* Initialize ExprState with empty step list */
state = makeNode(ExprState);
state->expr = node;
state->parent = parent;
state->ext_params = NULL;
/* Insert EEOP_*_FETCHSOME steps as needed */
ExecInitExprSlots(state, (Node *) node);
/* Compile the expression proper */
ExecInitExprRec(node, state, &state->resvalue, &state->resnull);
/* Finally, append a DONE step */
scratch.opcode = EEOP_DONE;
ExprEvalPushStep(state, &scratch);
ExecReadyExpr(state);
return state;
}
/*
@@ -171,20 +159,32 @@ ExecInitExpr(Expr *node, PlanState *parent)
ExprState *
ExecInitExprWithParams(Expr *node, ParamListInfo ext_params)
{
return ExecInitExprInternal(node, NULL, ext_params, NULL, NULL);
}
ExprState *state;
ExprEvalStep scratch = {0};
/*
* ExecInitExprWithCaseValue: prepare an expression tree for execution
*
* This is the same as ExecInitExpr, except that a pointer to the value for
* CaseTestExpr is passed here.
*/
ExprState *
ExecInitExprWithCaseValue(Expr *node, PlanState *parent,
Datum *caseval, bool *casenull)
{
return ExecInitExprInternal(node, parent, NULL, caseval, casenull);
/* Special case: NULL expression produces a NULL ExprState pointer */
if (node == NULL)
return NULL;
/* Initialize ExprState with empty step list */
state = makeNode(ExprState);
state->expr = node;
state->parent = NULL;
state->ext_params = ext_params;
/* Insert EEOP_*_FETCHSOME steps as needed */
ExecInitExprSlots(state, (Node *) node);
/* Compile the expression proper */
ExecInitExprRec(node, state, &state->resvalue, &state->resnull);
/* Finally, append a DONE step */
scratch.opcode = EEOP_DONE;
ExprEvalPushStep(state, &scratch);
ExecReadyExpr(state);
return state;
}
/*
@@ -2411,263 +2411,6 @@ ExecInitExprRec(Expr *node, ExprState *state,
break;
}
case T_JsonValueExpr:
{
JsonValueExpr *jve = (JsonValueExpr *) node;
ExecInitExprRec(jve->raw_expr, state, resv, resnull);
if (jve->formatted_expr)
{
Datum *innermost_caseval = state->innermost_caseval;
bool *innermost_isnull = state->innermost_casenull;
state->innermost_caseval = resv;
state->innermost_casenull = resnull;
ExecInitExprRec(jve->formatted_expr, state, resv, resnull);
state->innermost_caseval = innermost_caseval;
state->innermost_casenull = innermost_isnull;
}
break;
}
case T_JsonConstructorExpr:
{
JsonConstructorExpr *ctor = (JsonConstructorExpr *) node;
List *args = ctor->args;
ListCell *lc;
int nargs = list_length(args);
int argno = 0;
if (ctor->func)
{
ExecInitExprRec(ctor->func, state, resv, resnull);
}
else if ((ctor->type == JSCTOR_JSON_PARSE && !ctor->unique) ||
ctor->type == JSCTOR_JSON_SERIALIZE)
{
/* Use the value of the first argument as a result */
ExecInitExprRec(linitial(args), state, resv, resnull);
}
else
{
JsonConstructorExprState *jcstate;
jcstate = palloc0(sizeof(JsonConstructorExprState));
scratch.opcode = EEOP_JSON_CONSTRUCTOR;
scratch.d.json_constructor.jcstate = jcstate;
jcstate->constructor = ctor;
jcstate->arg_values = palloc(sizeof(Datum) * nargs);
jcstate->arg_nulls = palloc(sizeof(bool) * nargs);
jcstate->arg_types = palloc(sizeof(Oid) * nargs);
jcstate->nargs = nargs;
foreach(lc, args)
{
Expr *arg = (Expr *) lfirst(lc);
jcstate->arg_types[argno] = exprType((Node *) arg);
if (IsA(arg, Const))
{
/* Don't evaluate const arguments every round */
Const *con = (Const *) arg;
jcstate->arg_values[argno] = con->constvalue;
jcstate->arg_nulls[argno] = con->constisnull;
}
else
{
ExecInitExprRec(arg, state,
&jcstate->arg_values[argno],
&jcstate->arg_nulls[argno]);
}
argno++;
}
/* prepare type cache for datum_to_json[b]() */
if (ctor->type == JSCTOR_JSON_SCALAR)
{
bool is_jsonb =
ctor->returning->format->format_type == JS_FORMAT_JSONB;
jcstate->arg_type_cache =
palloc(sizeof(*jcstate->arg_type_cache) * nargs);
for (int i = 0; i < nargs; i++)
{
int category;
Oid outfuncid;
Oid typid = jcstate->arg_types[i];
if (is_jsonb)
{
JsonbTypeCategory jbcat;
jsonb_categorize_type(typid, &jbcat, &outfuncid);
category = (int) jbcat;
}
else
{
JsonTypeCategory jscat;
json_categorize_type(typid, &jscat, &outfuncid);
category = (int) jscat;
}
jcstate->arg_type_cache[i].outfuncid = outfuncid;
jcstate->arg_type_cache[i].category = category;
}
}
ExprEvalPushStep(state, &scratch);
}
if (ctor->coercion)
{
Datum *innermost_caseval = state->innermost_caseval;
bool *innermost_isnull = state->innermost_casenull;
state->innermost_caseval = resv;
state->innermost_casenull = resnull;
ExecInitExprRec(ctor->coercion, state, resv, resnull);
state->innermost_caseval = innermost_caseval;
state->innermost_casenull = innermost_isnull;
}
}
break;
case T_JsonIsPredicate:
{
JsonIsPredicate *pred = (JsonIsPredicate *) node;
ExecInitExprRec((Expr *) pred->expr, state, resv, resnull);
scratch.opcode = EEOP_IS_JSON;
scratch.d.is_json.pred = pred;
ExprEvalPushStep(state, &scratch);
break;
}
case T_JsonExpr:
{
JsonExpr *jexpr = castNode(JsonExpr, node);
JsonExprState *jsestate = palloc0(sizeof(JsonExprState));
ListCell *argexprlc;
ListCell *argnamelc;
scratch.opcode = EEOP_JSONEXPR;
scratch.d.jsonexpr.jsestate = jsestate;
jsestate->jsexpr = jexpr;
jsestate->formatted_expr =
palloc(sizeof(*jsestate->formatted_expr));
ExecInitExprRec((Expr *) jexpr->formatted_expr, state,
&jsestate->formatted_expr->value,
&jsestate->formatted_expr->isnull);
jsestate->pathspec =
palloc(sizeof(*jsestate->pathspec));
ExecInitExprRec((Expr *) jexpr->path_spec, state,
&jsestate->pathspec->value,
&jsestate->pathspec->isnull);
jsestate->res_expr =
palloc(sizeof(*jsestate->res_expr));
jsestate->result_expr = jexpr->result_coercion
? ExecInitExprWithCaseValue((Expr *) jexpr->result_coercion->expr,
state->parent,
&jsestate->res_expr->value,
&jsestate->res_expr->isnull)
: NULL;
jsestate->default_on_empty = !jexpr->on_empty ? NULL :
ExecInitExpr((Expr *) jexpr->on_empty->default_expr,
state->parent);
jsestate->default_on_error =
ExecInitExpr((Expr *) jexpr->on_error->default_expr,
state->parent);
if (jexpr->omit_quotes ||
(jexpr->result_coercion && jexpr->result_coercion->via_io))
{
Oid typinput;
/* lookup the result type's input function */
getTypeInputInfo(jexpr->returning->typid, &typinput,
&jsestate->input.typioparam);
fmgr_info(typinput, &jsestate->input.func);
}
jsestate->args = NIL;
forboth(argexprlc, jexpr->passing_values,
argnamelc, jexpr->passing_names)
{
Expr *argexpr = (Expr *) lfirst(argexprlc);
String *argname = lfirst_node(String, argnamelc);
JsonPathVariableEvalContext *var = palloc(sizeof(*var));
var->name = pstrdup(argname->sval);
var->typid = exprType((Node *) argexpr);
var->typmod = exprTypmod((Node *) argexpr);
var->estate = ExecInitExpr(argexpr, state->parent);
var->econtext = NULL;
var->mcxt = NULL;
var->evaluated = false;
var->value = (Datum) 0;
var->isnull = true;
jsestate->args =
lappend(jsestate->args, var);
}
jsestate->cache = NULL;
if (jexpr->coercions)
{
JsonCoercion **coercion;
struct JsonCoercionState *cstate;
Datum *caseval;
bool *casenull;
jsestate->coercion_expr =
palloc(sizeof(*jsestate->coercion_expr));
caseval = &jsestate->coercion_expr->value;
casenull = &jsestate->coercion_expr->isnull;
for (cstate = &jsestate->coercions.null,
coercion = &jexpr->coercions->null;
coercion <= &jexpr->coercions->composite;
coercion++, cstate++)
{
cstate->coercion = *coercion;
cstate->estate = *coercion ?
ExecInitExprWithCaseValue((Expr *) (*coercion)->expr,
state->parent,
caseval, casenull) : NULL;
}
}
ExprEvalPushStep(state, &scratch);
break;
}
default:
elog(ERROR, "unrecognized node type: %d",
(int) nodeTag(node));

View File

@@ -57,31 +57,22 @@
#include "postgres.h"
#include "access/heaptoast.h"
#include "access/xact.h"
#include "catalog/pg_proc.h"
#include "catalog/pg_type.h"
#include "commands/sequence.h"
#include "executor/execExpr.h"
#include "executor/nodeSubplan.h"
#include "funcapi.h"
#include "miscadmin.h"
#include "nodes/makefuncs.h"
#include "nodes/nodeFuncs.h"
#include "parser/parsetree.h"
#include "parser/parse_expr.h"
#include "pgstat.h"
#include "utils/array.h"
#include "utils/builtins.h"
#include "utils/date.h"
#include "utils/datum.h"
#include "utils/expandedrecord.h"
#include "utils/json.h"
#include "utils/jsonb.h"
#include "utils/jsonfuncs.h"
#include "utils/jsonpath.h"
#include "utils/lsyscache.h"
#include "utils/memutils.h"
#include "utils/resowner.h"
#include "utils/timestamp.h"
#include "utils/typcache.h"
#include "utils/xml.h"
@@ -488,9 +479,6 @@ ExecInterpExpr(ExprState *state, ExprContext *econtext, bool *isnull)
&&CASE_EEOP_GROUPING_FUNC,
&&CASE_EEOP_WINDOW_FUNC,
&&CASE_EEOP_SUBPLAN,
&&CASE_EEOP_JSON_CONSTRUCTOR,
&&CASE_EEOP_IS_JSON,
&&CASE_EEOP_JSONEXPR,
&&CASE_EEOP_AGG_STRICT_DESERIALIZE,
&&CASE_EEOP_AGG_DESERIALIZE,
&&CASE_EEOP_AGG_STRICT_INPUT_CHECK_ARGS,
@@ -1824,27 +1812,7 @@ ExecInterpExpr(ExprState *state, ExprContext *econtext, bool *isnull)
{
/* too complex for an inline implementation */
ExecEvalAggOrderedTransTuple(state, op, econtext);
EEO_NEXT();
}
EEO_CASE(EEOP_JSON_CONSTRUCTOR)
{
/* too complex for an inline implementation */
ExecEvalJsonConstructor(state, op, econtext);
EEO_NEXT();
}
EEO_CASE(EEOP_IS_JSON)
{
/* too complex for an inline implementation */
ExecEvalJsonIsPredicate(state, op);
EEO_NEXT();
}
EEO_CASE(EEOP_JSONEXPR)
{
/* too complex for an inline implementation */
ExecEvalJson(state, op, econtext);
EEO_NEXT();
}
@@ -3972,91 +3940,6 @@ ExecEvalXmlExpr(ExprState *state, ExprEvalStep *op)
}
}
void
ExecEvalJsonIsPredicate(ExprState *state, ExprEvalStep *op)
{
JsonIsPredicate *pred = op->d.is_json.pred;
Datum js = *op->resvalue;
Oid exprtype;
bool res;
if (*op->resnull)
{
*op->resvalue = BoolGetDatum(false);
return;
}
exprtype = exprType(pred->expr);
if (exprtype == TEXTOID || exprtype == JSONOID)
{
text *json = DatumGetTextP(js);
if (pred->item_type == JS_TYPE_ANY)
res = true;
else
{
switch (json_get_first_token(json, false))
{
case JSON_TOKEN_OBJECT_START:
res = pred->item_type == JS_TYPE_OBJECT;
break;
case JSON_TOKEN_ARRAY_START:
res = pred->item_type == JS_TYPE_ARRAY;
break;
case JSON_TOKEN_STRING:
case JSON_TOKEN_NUMBER:
case JSON_TOKEN_TRUE:
case JSON_TOKEN_FALSE:
case JSON_TOKEN_NULL:
res = pred->item_type == JS_TYPE_SCALAR;
break;
default:
res = false;
break;
}
}
/*
* Do full parsing pass only for uniqueness check or for JSON text
* validation.
*/
if (res && (pred->unique_keys || exprtype == TEXTOID))
res = json_validate(json, pred->unique_keys, false);
}
else if (exprtype == JSONBOID)
{
if (pred->item_type == JS_TYPE_ANY)
res = true;
else
{
Jsonb *jb = DatumGetJsonbP(js);
switch (pred->item_type)
{
case JS_TYPE_OBJECT:
res = JB_ROOT_IS_OBJECT(jb);
break;
case JS_TYPE_ARRAY:
res = JB_ROOT_IS_ARRAY(jb) && !JB_ROOT_IS_SCALAR(jb);
break;
case JS_TYPE_SCALAR:
res = JB_ROOT_IS_ARRAY(jb) && JB_ROOT_IS_SCALAR(jb);
break;
default:
res = false;
break;
}
}
/* Key uniqueness check is redundant for jsonb */
}
else
res = false;
*op->resvalue = BoolGetDatum(res);
}
/*
* ExecEvalGroupingFunc
*
@@ -4619,629 +4502,3 @@ ExecAggPlainTransByRef(AggState *aggstate, AggStatePerTrans pertrans,
MemoryContextSwitchTo(oldContext);
}
/*
* Evaluate a JSON constructor expression.
*/
void
ExecEvalJsonConstructor(ExprState *state, ExprEvalStep *op,
ExprContext *econtext)
{
Datum res;
JsonConstructorExprState *jcstate = op->d.json_constructor.jcstate;
JsonConstructorExpr *ctor = jcstate->constructor;
bool is_jsonb = ctor->returning->format->format_type == JS_FORMAT_JSONB;
bool isnull = false;
if (ctor->type == JSCTOR_JSON_ARRAY)
res = (is_jsonb ?
jsonb_build_array_worker :
json_build_array_worker) (jcstate->nargs,
jcstate->arg_values,
jcstate->arg_nulls,
jcstate->arg_types,
ctor->absent_on_null);
else if (ctor->type == JSCTOR_JSON_OBJECT)
res = (is_jsonb ?
jsonb_build_object_worker :
json_build_object_worker) (jcstate->nargs,
jcstate->arg_values,
jcstate->arg_nulls,
jcstate->arg_types,
ctor->absent_on_null,
ctor->unique);
else if (ctor->type == JSCTOR_JSON_SCALAR)
{
if (jcstate->arg_nulls[0])
{
res = (Datum) 0;
isnull = true;
}
else
{
Datum value = jcstate->arg_values[0];
int category = jcstate->arg_type_cache[0].category;
Oid outfuncid = jcstate->arg_type_cache[0].outfuncid;
if (is_jsonb)
res = to_jsonb_worker(value, category, outfuncid);
else
res = to_json_worker(value, category, outfuncid);
}
}
else if (ctor->type == JSCTOR_JSON_PARSE)
{
if (jcstate->arg_nulls[0])
{
res = (Datum) 0;
isnull = true;
}
else
{
Datum value = jcstate->arg_values[0];
text *js = DatumGetTextP(value);
if (is_jsonb)
res = jsonb_from_text(js, true);
else
{
(void) json_validate(js, true, true);
res = value;
}
}
}
else
{
res = (Datum) 0;
elog(ERROR, "invalid JsonConstructorExpr type %d", ctor->type);
}
*op->resvalue = res;
*op->resnull = isnull;
}
/*
* Evaluate a JSON error/empty behavior result.
*/
static Datum
ExecEvalJsonBehavior(ExprContext *econtext, JsonBehavior *behavior,
ExprState *default_estate, bool *is_null)
{
*is_null = false;
switch (behavior->btype)
{
case JSON_BEHAVIOR_EMPTY_ARRAY:
return JsonbPGetDatum(JsonbMakeEmptyArray());
case JSON_BEHAVIOR_EMPTY_OBJECT:
return JsonbPGetDatum(JsonbMakeEmptyObject());
case JSON_BEHAVIOR_TRUE:
return BoolGetDatum(true);
case JSON_BEHAVIOR_FALSE:
return BoolGetDatum(false);
case JSON_BEHAVIOR_NULL:
case JSON_BEHAVIOR_UNKNOWN:
case JSON_BEHAVIOR_EMPTY:
*is_null = true;
return (Datum) 0;
case JSON_BEHAVIOR_DEFAULT:
return ExecEvalExpr(default_estate, econtext, is_null);
default:
elog(ERROR, "unrecognized SQL/JSON behavior %d", behavior->btype);
return (Datum) 0;
}
}
/*
* Evaluate a coercion of a JSON item to the target type.
*/
static Datum
ExecEvalJsonExprCoercion(ExprEvalStep *op, ExprContext *econtext,
Datum res, bool *isNull, void *p, bool *error)
{
ExprState *estate = p;
JsonExprState *jsestate;
if (estate) /* coerce using specified expression */
return ExecEvalExpr(estate, econtext, isNull);
jsestate = op->d.jsonexpr.jsestate;
if (jsestate->jsexpr->op != JSON_EXISTS_OP)
{
JsonCoercion *coercion = jsestate->jsexpr->result_coercion;
JsonExpr *jexpr = jsestate->jsexpr;
Jsonb *jb = *isNull ? NULL : DatumGetJsonbP(res);
if ((coercion && coercion->via_io) ||
(jexpr->omit_quotes && !*isNull &&
JB_ROOT_IS_SCALAR(jb)))
{
/* strip quotes and call typinput function */
char *str = *isNull ? NULL : JsonbUnquote(jb);
return InputFunctionCall(&jsestate->input.func, str,
jsestate->input.typioparam,
jexpr->returning->typmod);
}
else if (coercion && coercion->via_populate)
return json_populate_type(res, JSONBOID,
jexpr->returning->typid,
jexpr->returning->typmod,
&jsestate->cache,
econtext->ecxt_per_query_memory,
isNull);
}
if (jsestate->result_expr)
{
jsestate->res_expr->value = res;
jsestate->res_expr->isnull = *isNull;
res = ExecEvalExpr(jsestate->result_expr, econtext, isNull);
}
return res;
}
/*
* Evaluate a JSON path variable, caching the computed value.
*/
int
EvalJsonPathVar(void *cxt, char *varName, int varNameLen,
JsonbValue *val, JsonbValue *baseObject)
{
JsonPathVariableEvalContext *var = NULL;
List *vars = cxt;
ListCell *lc;
int id = 1;
if (!varName)
return list_length(vars);
foreach(lc, vars)
{
var = lfirst(lc);
if (!strncmp(var->name, varName, varNameLen))
break;
var = NULL;
id++;
}
if (!var)
return -1;
if (!var->evaluated)
{
MemoryContext oldcxt = var->mcxt ?
MemoryContextSwitchTo(var->mcxt) : NULL;
var->value = ExecEvalExpr(var->estate, var->econtext, &var->isnull);
var->evaluated = true;
if (oldcxt)
MemoryContextSwitchTo(oldcxt);
}
if (var->isnull)
{
val->type = jbvNull;
return 0;
}
JsonItemFromDatum(var->value, var->typid, var->typmod, val);
*baseObject = *val;
return id;
}
/*
* Prepare SQL/JSON item coercion to the output type. Returns a datum of the
* corresponding SQL type and a pointer to the coercion state.
*/
Datum
ExecPrepareJsonItemCoercion(JsonbValue *item,
JsonReturning *returning,
struct JsonCoercionsState *coercions,
struct JsonCoercionState **pcoercion)
{
struct JsonCoercionState *coercion;
Datum res;
JsonbValue buf;
if (item->type == jbvBinary &&
JsonContainerIsScalar(item->val.binary.data))
{
bool res PG_USED_FOR_ASSERTS_ONLY;
res = JsonbExtractScalar(item->val.binary.data, &buf);
item = &buf;
Assert(res);
}
/* get coercion state reference and datum of the corresponding SQL type */
switch (item->type)
{
case jbvNull:
coercion = &coercions->null;
res = (Datum) 0;
break;
case jbvString:
coercion = &coercions->string;
res = PointerGetDatum(cstring_to_text_with_len(item->val.string.val,
item->val.string.len));
break;
case jbvNumeric:
coercion = &coercions->numeric;
res = NumericGetDatum(item->val.numeric);
break;
case jbvBool:
coercion = &coercions->boolean;
res = BoolGetDatum(item->val.boolean);
break;
case jbvDatetime:
res = item->val.datetime.value;
switch (item->val.datetime.typid)
{
case DATEOID:
coercion = &coercions->date;
break;
case TIMEOID:
coercion = &coercions->time;
break;
case TIMETZOID:
coercion = &coercions->timetz;
break;
case TIMESTAMPOID:
coercion = &coercions->timestamp;
break;
case TIMESTAMPTZOID:
coercion = &coercions->timestamptz;
break;
default:
elog(ERROR, "unexpected jsonb datetime type oid %u",
item->val.datetime.typid);
return (Datum) 0;
}
break;
case jbvArray:
case jbvObject:
case jbvBinary:
coercion = &coercions->composite;
res = JsonbPGetDatum(JsonbValueToJsonb(item));
break;
default:
elog(ERROR, "unexpected jsonb value type %d", item->type);
return (Datum) 0;
}
*pcoercion = coercion;
return res;
}
typedef Datum (*JsonFunc) (ExprEvalStep *op, ExprContext *econtext,
Datum item, bool *resnull, void *p, bool *error);
static Datum
ExecEvalJsonExprSubtrans(JsonFunc func, ExprEvalStep *op,
ExprContext *econtext,
Datum res, bool *resnull,
void *p, bool *error, bool subtrans)
{
MemoryContext oldcontext;
ResourceOwner oldowner;
if (!subtrans)
/* No need to use subtransactions. */
return func(op, econtext, res, resnull, p, error);
/*
* We should catch exceptions of category ERRCODE_DATA_EXCEPTION and
* execute the corresponding ON ERROR behavior then.
*/
oldcontext = CurrentMemoryContext;
oldowner = CurrentResourceOwner;
Assert(error);
BeginInternalSubTransaction(NULL);
/* Want to execute expressions inside function's memory context */
MemoryContextSwitchTo(oldcontext);
PG_TRY();
{
res = func(op, econtext, res, resnull, p, error);
/* Commit the inner transaction, return to outer xact context */
ReleaseCurrentSubTransaction();
MemoryContextSwitchTo(oldcontext);
CurrentResourceOwner = oldowner;
}
PG_CATCH();
{
ErrorData *edata;
int ecategory;
/* Save error info in oldcontext */
MemoryContextSwitchTo(oldcontext);
edata = CopyErrorData();
FlushErrorState();
/* Abort the inner transaction */
RollbackAndReleaseCurrentSubTransaction();
MemoryContextSwitchTo(oldcontext);
CurrentResourceOwner = oldowner;
ecategory = ERRCODE_TO_CATEGORY(edata->sqlerrcode);
if (ecategory != ERRCODE_DATA_EXCEPTION && /* jsonpath and other data
* errors */
ecategory != ERRCODE_INTEGRITY_CONSTRAINT_VIOLATION) /* domain errors */
ReThrowError(edata);
res = (Datum) 0;
*error = true;
}
PG_END_TRY();
return res;
}
typedef struct
{
JsonPath *path;
bool *error;
bool coercionInSubtrans;
} ExecEvalJsonExprContext;
static Datum
ExecEvalJsonExpr(ExprEvalStep *op, ExprContext *econtext,
Datum item, bool *resnull, void *pcxt,
bool *error)
{
ExecEvalJsonExprContext *cxt = pcxt;
JsonPath *path = cxt->path;
JsonExprState *jsestate = op->d.jsonexpr.jsestate;
JsonExpr *jexpr = jsestate->jsexpr;
ExprState *estate = NULL;
bool empty = false;
Datum res = (Datum) 0;
switch (jexpr->op)
{
case JSON_QUERY_OP:
res = JsonPathQuery(item, path, jexpr->wrapper, &empty, error,
jsestate->args);
if (error && *error)
{
*resnull = true;
return (Datum) 0;
}
*resnull = !DatumGetPointer(res);
break;
case JSON_VALUE_OP:
{
struct JsonCoercionState *jcstate;
JsonbValue *jbv = JsonPathValue(item, path, &empty, error,
jsestate->args);
if (error && *error)
return (Datum) 0;
if (!jbv) /* NULL or empty */
break;
Assert(!empty);
*resnull = false;
/* coerce scalar item to the output type */
if (jexpr->returning->typid == JSONOID ||
jexpr->returning->typid == JSONBOID)
{
/* Use result coercion from json[b] to the output type */
res = JsonbPGetDatum(JsonbValueToJsonb(jbv));
break;
}
/* Use coercion from SQL/JSON item type to the output type */
res = ExecPrepareJsonItemCoercion(jbv,
jsestate->jsexpr->returning,
&jsestate->coercions,
&jcstate);
if (jcstate->coercion &&
(jcstate->coercion->via_io ||
jcstate->coercion->via_populate))
{
if (error)
{
*error = true;
return (Datum) 0;
}
/*
* Coercion via I/O means here that the cast to the target
* type simply does not exist.
*/
ereport(ERROR,
(errcode(ERRCODE_SQL_JSON_ITEM_CANNOT_BE_CAST_TO_TARGET_TYPE),
errmsg("SQL/JSON item cannot be cast to target type")));
}
else if (!jcstate->estate)
return res; /* no coercion */
/* coerce using specific expression */
estate = jcstate->estate;
jsestate->coercion_expr->value = res;
jsestate->coercion_expr->isnull = *resnull;
break;
}
case JSON_EXISTS_OP:
{
bool exists = JsonPathExists(item, path,
jsestate->args,
error);
*resnull = error && *error;
res = BoolGetDatum(exists);
if (!jsestate->result_expr)
return res;
/* coerce using result expression */
estate = jsestate->result_expr;
jsestate->res_expr->value = res;
jsestate->res_expr->isnull = *resnull;
break;
}
case JSON_TABLE_OP:
*resnull = false;
return item;
default:
elog(ERROR, "unrecognized SQL/JSON expression op %d", jexpr->op);
return (Datum) 0;
}
if (empty)
{
Assert(jexpr->on_empty); /* it is not JSON_EXISTS */
if (jexpr->on_empty->btype == JSON_BEHAVIOR_ERROR)
{
if (error)
{
*error = true;
return (Datum) 0;
}
ereport(ERROR,
(errcode(ERRCODE_NO_SQL_JSON_ITEM),
errmsg("no SQL/JSON item")));
}
if (jexpr->on_empty->btype == JSON_BEHAVIOR_DEFAULT)
/*
* Execute DEFAULT expression as a coercion expression, because
* its result is already coerced to the target type.
*/
estate = jsestate->default_on_empty;
else
/* Execute ON EMPTY behavior */
res = ExecEvalJsonBehavior(econtext, jexpr->on_empty,
jsestate->default_on_empty,
resnull);
}
return ExecEvalJsonExprSubtrans(ExecEvalJsonExprCoercion, op, econtext,
res, resnull, estate, error,
cxt->coercionInSubtrans);
}
bool
ExecEvalJsonNeedsSubTransaction(JsonExpr *jsexpr,
struct JsonCoercionsState *coercions)
{
if (jsexpr->on_error->btype == JSON_BEHAVIOR_ERROR)
return false;
if (jsexpr->op == JSON_EXISTS_OP && !jsexpr->result_coercion)
return false;
if (!coercions)
return true;
return false;
}
/* ----------------------------------------------------------------
* ExecEvalJson
* ----------------------------------------------------------------
*/
void
ExecEvalJson(ExprState *state, ExprEvalStep *op, ExprContext *econtext)
{
ExecEvalJsonExprContext cxt;
JsonExprState *jsestate = op->d.jsonexpr.jsestate;
JsonExpr *jexpr = jsestate->jsexpr;
Datum item;
Datum res = (Datum) 0;
JsonPath *path;
ListCell *lc;
bool error = false;
bool needSubtrans;
bool throwErrors = jexpr->on_error->btype == JSON_BEHAVIOR_ERROR;
*op->resnull = true; /* until we get a result */
*op->resvalue = (Datum) 0;
if (jsestate->formatted_expr->isnull || jsestate->pathspec->isnull)
{
/* execute domain checks for NULLs */
(void) ExecEvalJsonExprCoercion(op, econtext, res, op->resnull,
NULL, NULL);
Assert(*op->resnull);
return;
}
item = jsestate->formatted_expr->value;
path = DatumGetJsonPathP(jsestate->pathspec->value);
/* reset JSON path variable contexts */
foreach(lc, jsestate->args)
{
JsonPathVariableEvalContext *var = lfirst(lc);
var->econtext = econtext;
var->evaluated = false;
}
needSubtrans = ExecEvalJsonNeedsSubTransaction(jexpr, &jsestate->coercions);
cxt.path = path;
cxt.error = throwErrors ? NULL : &error;
cxt.coercionInSubtrans = !needSubtrans && !throwErrors;
Assert(!needSubtrans || cxt.error);
res = ExecEvalJsonExprSubtrans(ExecEvalJsonExpr, op, econtext, item,
op->resnull, &cxt, cxt.error,
needSubtrans);
if (error)
{
/* Execute ON ERROR behavior */
res = ExecEvalJsonBehavior(econtext, jexpr->on_error,
jsestate->default_on_error,
op->resnull);
/* result is already coerced in DEFAULT behavior case */
if (jexpr->on_error->btype != JSON_BEHAVIOR_DEFAULT)
res = ExecEvalJsonExprCoercion(op, econtext, res,
op->resnull,
NULL, NULL);
}
*op->resvalue = res;
}

View File

@@ -28,7 +28,6 @@
#include "miscadmin.h"
#include "nodes/execnodes.h"
#include "utils/builtins.h"
#include "utils/jsonpath.h"
#include "utils/lsyscache.h"
#include "utils/memutils.h"
#include "utils/xml.h"
@@ -162,9 +161,8 @@ ExecInitTableFuncScan(TableFuncScan *node, EState *estate, int eflags)
scanstate->ss.ps.qual =
ExecInitQual(node->scan.plan.qual, &scanstate->ss.ps);
/* Only XMLTABLE and JSON_TABLE are supported currently */
scanstate->routine =
tf->functype == TFT_XMLTABLE ? &XmlTableRoutine : &JsonbTableRoutine;
/* Only XMLTABLE is supported currently */
scanstate->routine = &XmlTableRoutine;
scanstate->perTableCxt =
AllocSetContextCreate(CurrentMemoryContext,
@@ -383,17 +381,14 @@ tfuncInitialize(TableFuncScanState *tstate, ExprContext *econtext, Datum doc)
routine->SetNamespace(tstate, ns_name, ns_uri);
}
if (routine->SetRowFilter)
{
/* Install the row filter expression into the table builder context */
value = ExecEvalExpr(tstate->rowexpr, econtext, &isnull);
if (isnull)
ereport(ERROR,
(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
errmsg("row filter expression must not be null")));
/* Install the row filter expression into the table builder context */
value = ExecEvalExpr(tstate->rowexpr, econtext, &isnull);
if (isnull)
ereport(ERROR,
(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
errmsg("row filter expression must not be null")));
routine->SetRowFilter(tstate, TextDatumGetCString(value));
}
routine->SetRowFilter(tstate, TextDatumGetCString(value));
/*
* Install the column filter expressions into the table builder context.

View File

@@ -2395,24 +2395,6 @@ llvm_compile_expr(ExprState *state)
LLVMBuildBr(b, opblocks[opno + 1]);
break;
case EEOP_JSON_CONSTRUCTOR:
build_EvalXFunc(b, mod, "ExecEvalJsonConstructor",
v_state, op, v_econtext);
LLVMBuildBr(b, opblocks[opno + 1]);
break;
case EEOP_IS_JSON:
build_EvalXFunc(b, mod, "ExecEvalJsonIsPredicate",
v_state, op);
LLVMBuildBr(b, opblocks[opno + 1]);
break;
case EEOP_JSONEXPR:
build_EvalXFunc(b, mod, "ExecEvalJson",
v_state, op, v_econtext);
LLVMBuildBr(b, opblocks[opno + 1]);
break;
case EEOP_LAST:
Assert(false);
break;

View File

@@ -133,9 +133,6 @@ void *referenced_functions[] =
ExecEvalSysVar,
ExecEvalWholeRowVar,
ExecEvalXmlExpr,
ExecEvalJsonConstructor,
ExecEvalJsonIsPredicate,
ExecEvalJson,
MakeExpandedObjectReadOnlyInternal,
slot_getmissingattrs,
slot_getsomeattrs_int,

View File

@@ -19,7 +19,6 @@
#include "catalog/pg_type.h"
#include "nodes/makefuncs.h"
#include "nodes/nodeFuncs.h"
#include "utils/errcodes.h"
#include "utils/lsyscache.h"
@@ -819,124 +818,3 @@ makeVacuumRelation(RangeVar *relation, Oid oid, List *va_cols)
v->va_cols = va_cols;
return v;
}
/*
* makeJsonFormat -
* creates a JsonFormat node
*/
JsonFormat *
makeJsonFormat(JsonFormatType type, JsonEncoding encoding, int location)
{
JsonFormat *jf = makeNode(JsonFormat);
jf->format_type = type;
jf->encoding = encoding;
jf->location = location;
return jf;
}
/*
* makeJsonValueExpr -
* creates a JsonValueExpr node
*/
JsonValueExpr *
makeJsonValueExpr(Expr *expr, JsonFormat *format)
{
JsonValueExpr *jve = makeNode(JsonValueExpr);
jve->raw_expr = expr;
jve->formatted_expr = NULL;
jve->format = format;
return jve;
}
/*
* makeJsonBehavior -
* creates a JsonBehavior node
*/
JsonBehavior *
makeJsonBehavior(JsonBehaviorType type, Node *default_expr)
{
JsonBehavior *behavior = makeNode(JsonBehavior);
behavior->btype = type;
behavior->default_expr = default_expr;
return behavior;
}
/*
* makeJsonTableJoinedPlan -
* creates a joined JsonTablePlan node
*/
Node *
makeJsonTableJoinedPlan(JsonTablePlanJoinType type, Node *plan1, Node *plan2,
int location)
{
JsonTablePlan *n = makeNode(JsonTablePlan);
n->plan_type = JSTP_JOINED;
n->join_type = type;
n->plan1 = castNode(JsonTablePlan, plan1);
n->plan2 = castNode(JsonTablePlan, plan2);
n->location = location;
return (Node *) n;
}
/*
* makeJsonEncoding -
* converts JSON encoding name to enum JsonEncoding
*/
JsonEncoding
makeJsonEncoding(char *name)
{
if (!pg_strcasecmp(name, "utf8"))
return JS_ENC_UTF8;
if (!pg_strcasecmp(name, "utf16"))
return JS_ENC_UTF16;
if (!pg_strcasecmp(name, "utf32"))
return JS_ENC_UTF32;
ereport(ERROR,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("unrecognized JSON encoding: %s", name)));
return JS_ENC_DEFAULT;
}
/*
* makeJsonKeyValue -
* creates a JsonKeyValue node
*/
Node *
makeJsonKeyValue(Node *key, Node *value)
{
JsonKeyValue *n = makeNode(JsonKeyValue);
n->key = (Expr *) key;
n->value = castNode(JsonValueExpr, value);
return (Node *) n;
}
/*
* makeJsonIsPredicate -
* creates a JsonIsPredicate node
*/
Node *
makeJsonIsPredicate(Node *expr, JsonFormat *format, JsonValueType item_type,
bool unique_keys, int location)
{
JsonIsPredicate *n = makeNode(JsonIsPredicate);
n->expr = expr;
n->format = format;
n->item_type = item_type;
n->unique_keys = unique_keys;
n->location = location;
return (Node *) n;
}

View File

@@ -250,25 +250,6 @@ exprType(const Node *expr)
case T_PlaceHolderVar:
type = exprType((Node *) ((const PlaceHolderVar *) expr)->phexpr);
break;
case T_JsonValueExpr:
{
const JsonValueExpr *jve = (const JsonValueExpr *) expr;
type = exprType((Node *) (jve->formatted_expr ? jve->formatted_expr : jve->raw_expr));
}
break;
case T_JsonConstructorExpr:
type = ((const JsonConstructorExpr *) expr)->returning->typid;
break;
case T_JsonIsPredicate:
type = BOOLOID;
break;
case T_JsonExpr:
type = ((const JsonExpr *) expr)->returning->typid;
break;
case T_JsonCoercion:
type = exprType(((const JsonCoercion *) expr)->expr);
break;
default:
elog(ERROR, "unrecognized node type: %d", (int) nodeTag(expr));
type = InvalidOid; /* keep compiler quiet */
@@ -501,14 +482,6 @@ exprTypmod(const Node *expr)
return ((const SetToDefault *) expr)->typeMod;
case T_PlaceHolderVar:
return exprTypmod((Node *) ((const PlaceHolderVar *) expr)->phexpr);
case T_JsonValueExpr:
return exprTypmod((Node *) ((const JsonValueExpr *) expr)->formatted_expr);
case T_JsonConstructorExpr:
return ((const JsonConstructorExpr *) expr)->returning->typmod;
case T_JsonExpr:
return ((JsonExpr *) expr)->returning->typmod;
case T_JsonCoercion:
return exprTypmod(((const JsonCoercion *) expr)->expr);
default:
break;
}
@@ -991,37 +964,6 @@ exprCollation(const Node *expr)
case T_PlaceHolderVar:
coll = exprCollation((Node *) ((const PlaceHolderVar *) expr)->phexpr);
break;
case T_JsonValueExpr:
coll = exprCollation((Node *) ((const JsonValueExpr *) expr)->formatted_expr);
break;
case T_JsonConstructorExpr:
{
const JsonConstructorExpr *ctor = (const JsonConstructorExpr *) expr;
if (ctor->coercion)
coll = exprCollation((Node *) ctor->coercion);
else
coll = InvalidOid;
}
break;
case T_JsonIsPredicate:
coll = InvalidOid; /* result is always a boolean type */
break;
case T_JsonExpr:
{
JsonExpr *jexpr = (JsonExpr *) expr;
JsonCoercion *coercion = jexpr->result_coercion;
if (!coercion)
coll = InvalidOid;
else if (coercion->expr)
coll = exprCollation(coercion->expr);
else if (coercion->via_io || coercion->via_populate)
coll = coercion->collation;
else
coll = InvalidOid;
}
break;
default:
elog(ERROR, "unrecognized node type: %d", (int) nodeTag(expr));
coll = InvalidOid; /* keep compiler quiet */
@@ -1234,39 +1176,6 @@ exprSetCollation(Node *expr, Oid collation)
/* NextValueExpr's result is an integer type ... */
Assert(!OidIsValid(collation)); /* ... so never set a collation */
break;
case T_JsonValueExpr:
exprSetCollation((Node *) ((JsonValueExpr *) expr)->formatted_expr,
collation);
break;
case T_JsonConstructorExpr:
{
JsonConstructorExpr *ctor = (JsonConstructorExpr *) expr;
if (ctor->coercion)
exprSetCollation((Node *) ctor->coercion, collation);
else
Assert(!OidIsValid(collation)); /* result is always a
* json[b] type */
}
break;
case T_JsonIsPredicate:
Assert(!OidIsValid(collation)); /* result is always boolean */
break;
case T_JsonExpr:
{
JsonExpr *jexpr = (JsonExpr *) expr;
JsonCoercion *coercion = jexpr->result_coercion;
if (!coercion)
Assert(!OidIsValid(collation));
else if (coercion->expr)
exprSetCollation(coercion->expr, collation);
else if (coercion->via_io || coercion->via_populate)
coercion->collation = collation;
else
Assert(!OidIsValid(collation));
}
break;
default:
elog(ERROR, "unrecognized node type: %d", (int) nodeTag(expr));
break;
@@ -1713,24 +1622,6 @@ exprLocation(const Node *expr)
case T_PartitionRangeDatum:
loc = ((const PartitionRangeDatum *) expr)->location;
break;
case T_JsonValueExpr:
loc = exprLocation((Node *) ((const JsonValueExpr *) expr)->raw_expr);
break;
case T_JsonConstructorExpr:
loc = ((const JsonConstructorExpr *) expr)->location;
break;
case T_JsonIsPredicate:
loc = ((const JsonIsPredicate *) expr)->location;
break;
case T_JsonExpr:
{
const JsonExpr *jsexpr = (const JsonExpr *) expr;
/* consider both function name and leftmost arg */
loc = leftmostLoc(jsexpr->location,
exprLocation(jsexpr->formatted_expr));
}
break;
default:
/* for any other node type it's just unknown... */
loc = -1;
@@ -2473,80 +2364,6 @@ expression_tree_walker(Node *node,
return true;
if (walker(tf->coldefexprs, context))
return true;
if (walker(tf->colvalexprs, context))
return true;
}
break;
case T_JsonValueExpr:
{
JsonValueExpr *jve = (JsonValueExpr *) node;
if (walker(jve->raw_expr, context))
return true;
if (walker(jve->formatted_expr, context))
return true;
}
break;
case T_JsonConstructorExpr:
{
JsonConstructorExpr *ctor = (JsonConstructorExpr *) node;
if (walker(ctor->args, context))
return true;
if (walker(ctor->func, context))
return true;
if (walker(ctor->coercion, context))
return true;
}
break;
case T_JsonIsPredicate:
return walker(((JsonIsPredicate *) node)->expr, context);
case T_JsonExpr:
{
JsonExpr *jexpr = (JsonExpr *) node;
if (walker(jexpr->formatted_expr, context))
return true;
if (walker(jexpr->result_coercion, context))
return true;
if (walker(jexpr->passing_values, context))
return true;
/* we assume walker doesn't care about passing_names */
if (jexpr->on_empty &&
walker(jexpr->on_empty->default_expr, context))
return true;
if (walker(jexpr->on_error->default_expr, context))
return true;
if (walker(jexpr->coercions, context))
return true;
}
break;
case T_JsonCoercion:
return walker(((JsonCoercion *) node)->expr, context);
case T_JsonItemCoercions:
{
JsonItemCoercions *coercions = (JsonItemCoercions *) node;
if (walker(coercions->null, context))
return true;
if (walker(coercions->string, context))
return true;
if (walker(coercions->numeric, context))
return true;
if (walker(coercions->boolean, context))
return true;
if (walker(coercions->date, context))
return true;
if (walker(coercions->time, context))
return true;
if (walker(coercions->timetz, context))
return true;
if (walker(coercions->timestamp, context))
return true;
if (walker(coercions->timestamptz, context))
return true;
if (walker(coercions->composite, context))
return true;
}
break;
default:
@@ -2876,7 +2693,6 @@ expression_tree_mutator(Node *node,
case T_RangeTblRef:
case T_SortGroupClause:
case T_CTESearchClause:
case T_JsonFormat:
return (Node *) copyObject(node);
case T_WithCheckOption:
{
@@ -3517,102 +3333,6 @@ expression_tree_mutator(Node *node,
MUTATE(newnode->rowexpr, tf->rowexpr, Node *);
MUTATE(newnode->colexprs, tf->colexprs, List *);
MUTATE(newnode->coldefexprs, tf->coldefexprs, List *);
MUTATE(newnode->colvalexprs, tf->colvalexprs, List *);
return (Node *) newnode;
}
break;
case T_JsonReturning:
{
JsonReturning *jr = (JsonReturning *) node;
JsonReturning *newnode;
FLATCOPY(newnode, jr, JsonReturning);
MUTATE(newnode->format, jr->format, JsonFormat *);
return (Node *) newnode;
}
case T_JsonValueExpr:
{
JsonValueExpr *jve = (JsonValueExpr *) node;
JsonValueExpr *newnode;
FLATCOPY(newnode, jve, JsonValueExpr);
MUTATE(newnode->raw_expr, jve->raw_expr, Expr *);
MUTATE(newnode->formatted_expr, jve->formatted_expr, Expr *);
MUTATE(newnode->format, jve->format, JsonFormat *);
return (Node *) newnode;
}
case T_JsonConstructorExpr:
{
JsonConstructorExpr *jve = (JsonConstructorExpr *) node;
JsonConstructorExpr *newnode;
FLATCOPY(newnode, jve, JsonConstructorExpr);
MUTATE(newnode->args, jve->args, List *);
MUTATE(newnode->func, jve->func, Expr *);
MUTATE(newnode->coercion, jve->coercion, Expr *);
MUTATE(newnode->returning, jve->returning, JsonReturning *);
return (Node *) newnode;
}
break;
case T_JsonIsPredicate:
{
JsonIsPredicate *pred = (JsonIsPredicate *) node;
JsonIsPredicate *newnode;
FLATCOPY(newnode, pred, JsonIsPredicate);
MUTATE(newnode->expr, pred->expr, Node *);
return (Node *) newnode;
}
break;
case T_JsonExpr:
{
JsonExpr *jexpr = (JsonExpr *) node;
JsonExpr *newnode;
FLATCOPY(newnode, jexpr, JsonExpr);
MUTATE(newnode->path_spec, jexpr->path_spec, Node *);
MUTATE(newnode->formatted_expr, jexpr->formatted_expr, Node *);
MUTATE(newnode->result_coercion, jexpr->result_coercion, JsonCoercion *);
MUTATE(newnode->passing_values, jexpr->passing_values, List *);
/* assume mutator does not care about passing_names */
if (newnode->on_empty)
MUTATE(newnode->on_empty->default_expr,
jexpr->on_empty->default_expr, Node *);
MUTATE(newnode->on_error->default_expr,
jexpr->on_error->default_expr, Node *);
return (Node *) newnode;
}
break;
case T_JsonCoercion:
{
JsonCoercion *coercion = (JsonCoercion *) node;
JsonCoercion *newnode;
FLATCOPY(newnode, coercion, JsonCoercion);
MUTATE(newnode->expr, coercion->expr, Node *);
return (Node *) newnode;
}
break;
case T_JsonItemCoercions:
{
JsonItemCoercions *coercions = (JsonItemCoercions *) node;
JsonItemCoercions *newnode;
FLATCOPY(newnode, coercions, JsonItemCoercions);
MUTATE(newnode->null, coercions->null, JsonCoercion *);
MUTATE(newnode->string, coercions->string, JsonCoercion *);
MUTATE(newnode->numeric, coercions->numeric, JsonCoercion *);
MUTATE(newnode->boolean, coercions->boolean, JsonCoercion *);
MUTATE(newnode->date, coercions->date, JsonCoercion *);
MUTATE(newnode->time, coercions->time, JsonCoercion *);
MUTATE(newnode->timetz, coercions->timetz, JsonCoercion *);
MUTATE(newnode->timestamp, coercions->timestamp, JsonCoercion *);
MUTATE(newnode->timestamptz, coercions->timestamptz, JsonCoercion *);
MUTATE(newnode->composite, coercions->composite, JsonCoercion *);
return (Node *) newnode;
}
break;
@@ -3888,7 +3608,6 @@ raw_expression_tree_walker(Node *node,
case T_ParamRef:
case T_A_Const:
case T_A_Star:
case T_JsonFormat:
/* primitive node types with no subnodes */
break;
case T_Alias:
@@ -4351,211 +4070,6 @@ raw_expression_tree_walker(Node *node,
case T_CommonTableExpr:
/* search_clause and cycle_clause are not interesting here */
return walker(((CommonTableExpr *) node)->ctequery, context);
case T_JsonReturning:
return walker(((JsonReturning *) node)->format, context);
case T_JsonValueExpr:
{
JsonValueExpr *jve = (JsonValueExpr *) node;
if (walker(jve->raw_expr, context))
return true;
if (walker(jve->formatted_expr, context))
return true;
if (walker(jve->format, context))
return true;
}
break;
case T_JsonParseExpr:
{
JsonParseExpr *jpe = (JsonParseExpr *) node;
if (walker(jpe->expr, context))
return true;
if (walker(jpe->output, context))
return true;
}
break;
case T_JsonScalarExpr:
{
JsonScalarExpr *jse = (JsonScalarExpr *) node;
if (walker(jse->expr, context))
return true;
if (walker(jse->output, context))
return true;
}
break;
case T_JsonSerializeExpr:
{
JsonSerializeExpr *jse = (JsonSerializeExpr *) node;
if (walker(jse->expr, context))
return true;
if (walker(jse->output, context))
return true;
}
break;
case T_JsonConstructorExpr:
{
JsonConstructorExpr *ctor = (JsonConstructorExpr *) node;
if (walker(ctor->args, context))
return true;
if (walker(ctor->func, context))
return true;
if (walker(ctor->coercion, context))
return true;
if (walker(ctor->returning, context))
return true;
}
break;
case T_JsonOutput:
{
JsonOutput *out = (JsonOutput *) node;
if (walker(out->typeName, context))
return true;
if (walker(out->returning, context))
return true;
}
break;
case T_JsonKeyValue:
{
JsonKeyValue *jkv = (JsonKeyValue *) node;
if (walker(jkv->key, context))
return true;
if (walker(jkv->value, context))
return true;
}
break;
case T_JsonObjectConstructor:
{
JsonObjectConstructor *joc = (JsonObjectConstructor *) node;
if (walker(joc->output, context))
return true;
if (walker(joc->exprs, context))
return true;
}
break;
case T_JsonArrayConstructor:
{
JsonArrayConstructor *jac = (JsonArrayConstructor *) node;
if (walker(jac->output, context))
return true;
if (walker(jac->exprs, context))
return true;
}
break;
case T_JsonAggConstructor:
{
JsonAggConstructor *ctor = (JsonAggConstructor *) node;
if (walker(ctor->output, context))
return true;
if (walker(ctor->agg_order, context))
return true;
if (walker(ctor->agg_filter, context))
return true;
if (walker(ctor->over, context))
return true;
}
break;
case T_JsonObjectAgg:
{
JsonObjectAgg *joa = (JsonObjectAgg *) node;
if (walker(joa->constructor, context))
return true;
if (walker(joa->arg, context))
return true;
}
break;
case T_JsonArrayAgg:
{
JsonArrayAgg *jaa = (JsonArrayAgg *) node;
if (walker(jaa->constructor, context))
return true;
if (walker(jaa->arg, context))
return true;
}
break;
case T_JsonArrayQueryConstructor:
{
JsonArrayQueryConstructor *jaqc = (JsonArrayQueryConstructor *) node;
if (walker(jaqc->output, context))
return true;
if (walker(jaqc->query, context))
return true;
}
break;
case T_JsonIsPredicate:
return walker(((JsonIsPredicate *) node)->expr, context);
case T_JsonArgument:
return walker(((JsonArgument *) node)->val, context);
case T_JsonCommon:
{
JsonCommon *jc = (JsonCommon *) node;
if (walker(jc->expr, context))
return true;
if (walker(jc->pathspec, context))
return true;
if (walker(jc->passing, context))
return true;
}
break;
case T_JsonBehavior:
{
JsonBehavior *jb = (JsonBehavior *) node;
if (jb->btype == JSON_BEHAVIOR_DEFAULT &&
walker(jb->default_expr, context))
return true;
}
break;
case T_JsonFuncExpr:
{
JsonFuncExpr *jfe = (JsonFuncExpr *) node;
if (walker(jfe->common, context))
return true;
if (jfe->output && walker(jfe->output, context))
return true;
if (walker(jfe->on_empty, context))
return true;
if (walker(jfe->on_error, context))
return true;
}
break;
case T_JsonTable:
{
JsonTable *jt = (JsonTable *) node;
if (walker(jt->common, context))
return true;
if (walker(jt->columns, context))
return true;
}
break;
case T_JsonTableColumn:
{
JsonTableColumn *jtc = (JsonTableColumn *) node;
if (walker(jtc->typeName, context))
return true;
if (walker(jtc->on_empty, context))
return true;
if (walker(jtc->on_error, context))
return true;
if (jtc->coltype == JTC_NESTED && walker(jtc->columns, context))
return true;
}
break;
default:
elog(ERROR, "unrecognized node type: %d",
(int) nodeTag(node));

View File

@@ -4923,8 +4923,7 @@ cost_qual_eval_walker(Node *node, cost_qual_eval_context *context)
IsA(node, SQLValueFunction) ||
IsA(node, XmlExpr) ||
IsA(node, CoerceToDomain) ||
IsA(node, NextValueExpr) ||
IsA(node, JsonExpr))
IsA(node, NextValueExpr))
{
/* Treat all these as having cost 1 */
context->total.per_tuple += cpu_operator_cost;

View File

@@ -28,7 +28,6 @@
#include "catalog/pg_type.h"
#include "executor/executor.h"
#include "executor/functions.h"
#include "executor/execExpr.h"
#include "funcapi.h"
#include "miscadmin.h"
#include "nodes/makefuncs.h"
@@ -51,9 +50,6 @@
#include "utils/builtins.h"
#include "utils/datum.h"
#include "utils/fmgroids.h"
#include "utils/json.h"
#include "utils/jsonb.h"
#include "utils/jsonpath.h"
#include "utils/lsyscache.h"
#include "utils/memutils.h"
#include "utils/syscache.h"
@@ -386,45 +382,6 @@ contain_mutable_functions_walker(Node *node, void *context)
context))
return true;
if (IsA(node, JsonConstructorExpr))
{
const JsonConstructorExpr *ctor = (JsonConstructorExpr *) node;
ListCell *lc;
bool is_jsonb =
ctor->returning->format->format_type == JS_FORMAT_JSONB;
/* Check argument_type => json[b] conversions */
foreach(lc, ctor->args)
{
Oid typid = exprType(lfirst(lc));
if (is_jsonb ?
!to_jsonb_is_immutable(typid) :
!to_json_is_immutable(typid))
return true;
}
/* Check all subnodes */
}
if (IsA(node, JsonExpr))
{
JsonExpr *jexpr = castNode(JsonExpr, node);
Const *cnst;
if (!IsA(jexpr->path_spec, Const))
return true;
cnst = castNode(Const, jexpr->path_spec);
Assert(cnst->consttype == JSONPATHOID);
if (cnst->constisnull)
return false;
return jspIsMutable(DatumGetJsonPathP(cnst->constvalue),
jexpr->passing_names, jexpr->passing_values);
}
if (IsA(node, SQLValueFunction))
{
/* all variants of SQLValueFunction are stable */
@@ -896,18 +853,6 @@ max_parallel_hazard_walker(Node *node, max_parallel_hazard_context *context)
context, 0);
}
/* JsonExpr is parallel-unsafe if subtransactions can be used. */
else if (IsA(node, JsonExpr))
{
JsonExpr *jsexpr = (JsonExpr *) node;
if (ExecEvalJsonNeedsSubTransaction(jsexpr, NULL))
{
context->max_hazard = PROPARALLEL_UNSAFE;
return true;
}
}
/* Recurse to check arguments */
return expression_tree_walker(node,
max_parallel_hazard_walker,
@@ -3567,29 +3512,6 @@ eval_const_expressions_mutator(Node *node,
return ece_evaluate_expr((Node *) newcre);
return (Node *) newcre;
}
case T_JsonValueExpr:
{
JsonValueExpr *jve = (JsonValueExpr *) node;
Node *raw = eval_const_expressions_mutator((Node *) jve->raw_expr,
context);
if (raw && IsA(raw, Const))
{
Node *formatted;
Node *save_case_val = context->case_val;
context->case_val = raw;
formatted = eval_const_expressions_mutator((Node *) jve->formatted_expr,
context);
context->case_val = save_case_val;
if (formatted && IsA(formatted, Const))
return formatted;
}
break;
}
default:
break;
}

View File

@@ -23,7 +23,6 @@ OBJS = \
parse_enr.o \
parse_expr.o \
parse_func.o \
parse_jsontable.o \
parse_merge.o \
parse_node.o \
parse_oper.o \

File diff suppressed because it is too large.

View File

@@ -690,9 +690,7 @@ transformRangeTableFunc(ParseState *pstate, RangeTableFunc *rtf)
char **names;
int colno;
/* Currently only XMLTABLE and JSON_TABLE are supported */
tf->functype = TFT_XMLTABLE;
/* Currently only XMLTABLE is supported */
constructName = "XMLTABLE";
docType = XMLOID;
@@ -1099,17 +1097,13 @@ transformFromClauseItem(ParseState *pstate, Node *n,
rtr->rtindex = nsitem->p_rtindex;
return (Node *) rtr;
}
else if (IsA(n, RangeTableFunc) || IsA(n, JsonTable))
else if (IsA(n, RangeTableFunc))
{
/* table function is like a plain relation */
RangeTblRef *rtr;
ParseNamespaceItem *nsitem;
if (IsA(n, RangeTableFunc))
nsitem = transformRangeTableFunc(pstate, (RangeTableFunc *) n);
else
nsitem = transformJsonTable(pstate, (JsonTable *) n);
nsitem = transformRangeTableFunc(pstate, (RangeTableFunc *) n);
*top_nsitem = nsitem;
*namespace = list_make1(nsitem);
rtr = makeNode(RangeTblRef);

View File

@@ -691,13 +691,6 @@ assign_collations_walker(Node *node, assign_collations_context *context)
&loccontext);
}
break;
case T_JsonExpr:
/*
* Context item and PASSING arguments are already
* marked with collations in parse_expr.c.
*/
break;
default:
/*

File diff suppressed because it is too large.

View File

@@ -1,732 +0,0 @@
/*-------------------------------------------------------------------------
*
* parse_jsontable.c
* parsing of JSON_TABLE
*
* Portions Copyright (c) 1996-2022, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
*
* IDENTIFICATION
* src/backend/parser/parse_jsontable.c
*
*-------------------------------------------------------------------------
*/
#include "postgres.h"
#include "catalog/pg_collation.h"
#include "catalog/pg_type.h"
#include "miscadmin.h"
#include "nodes/makefuncs.h"
#include "nodes/nodeFuncs.h"
#include "optimizer/optimizer.h"
#include "parser/parse_clause.h"
#include "parser/parse_collate.h"
#include "parser/parse_expr.h"
#include "parser/parse_relation.h"
#include "parser/parse_type.h"
#include "utils/builtins.h"
#include "utils/json.h"
#include "utils/lsyscache.h"
/* Context for JSON_TABLE transformation */
typedef struct JsonTableContext
{
ParseState *pstate; /* parsing state */
JsonTable *table; /* untransformed node */
TableFunc *tablefunc; /* transformed node */
List *pathNames; /* list of all path and column names */
int pathNameId; /* path name id counter */
Oid contextItemTypid; /* type oid of context item (json/jsonb) */
} JsonTableContext;
static JsonTableParent *transformJsonTableColumns(JsonTableContext *cxt,
JsonTablePlan *plan,
List *columns,
char *pathSpec,
char **pathName,
int location);
static Node *
makeStringConst(char *str, int location)
{
A_Const *n = makeNode(A_Const);
n->val.node.type = T_String;
n->val.sval.sval = str;
n->location = location;
return (Node *) n;
}
/*
* Transform JSON_TABLE column
* - regular column into JSON_VALUE()
* - FORMAT JSON column into JSON_QUERY()
* - EXISTS column into JSON_EXISTS()
*/
static Node *
transformJsonTableColumn(JsonTableColumn *jtc, Node *contextItemExpr,
List *passingArgs, bool errorOnError)
{
JsonFuncExpr *jfexpr = makeNode(JsonFuncExpr);
JsonCommon *common = makeNode(JsonCommon);
JsonOutput *output = makeNode(JsonOutput);
char *pathspec;
JsonFormat *default_format;
jfexpr->op =
jtc->coltype == JTC_REGULAR ? JSON_VALUE_OP :
jtc->coltype == JTC_EXISTS ? JSON_EXISTS_OP : JSON_QUERY_OP;
jfexpr->common = common;
jfexpr->output = output;
jfexpr->on_empty = jtc->on_empty;
jfexpr->on_error = jtc->on_error;
if (!jfexpr->on_error && errorOnError)
jfexpr->on_error = makeJsonBehavior(JSON_BEHAVIOR_ERROR, NULL);
jfexpr->omit_quotes = jtc->omit_quotes;
jfexpr->wrapper = jtc->wrapper;
jfexpr->location = jtc->location;
output->typeName = jtc->typeName;
output->returning = makeNode(JsonReturning);
output->returning->format = jtc->format;
default_format = makeJsonFormat(JS_FORMAT_DEFAULT, JS_ENC_DEFAULT, -1);
common->pathname = NULL;
common->expr = makeJsonValueExpr((Expr *) contextItemExpr, default_format);
common->passing = passingArgs;
if (jtc->pathspec)
pathspec = jtc->pathspec;
else
{
/* Construct default path as '$."column_name"' */
StringInfoData path;
initStringInfo(&path);
appendStringInfoString(&path, "$.");
escape_json(&path, jtc->name);
pathspec = path.data;
}
common->pathspec = makeStringConst(pathspec, -1);
return (Node *) jfexpr;
}
static bool
isJsonTablePathNameDuplicate(JsonTableContext *cxt, const char *pathname)
{
ListCell *lc;
foreach(lc, cxt->pathNames)
{
if (!strcmp(pathname, (const char *) lfirst(lc)))
return true;
}
return false;
}
/* Register the column name in the path name list. */
static void
registerJsonTableColumn(JsonTableContext *cxt, char *colname)
{
if (isJsonTablePathNameDuplicate(cxt, colname))
ereport(ERROR,
(errcode(ERRCODE_DUPLICATE_ALIAS),
errmsg("duplicate JSON_TABLE column name: %s", colname),
errhint("JSON_TABLE column names must be distinct from one another.")));
cxt->pathNames = lappend(cxt->pathNames, colname);
}
/* Recursively register all nested column names in the path name list. */
static void
registerAllJsonTableColumns(JsonTableContext *cxt, List *columns)
{
ListCell *lc;
foreach(lc, columns)
{
JsonTableColumn *jtc = castNode(JsonTableColumn, lfirst(lc));
if (jtc->coltype == JTC_NESTED)
{
if (jtc->pathname)
registerJsonTableColumn(cxt, jtc->pathname);
registerAllJsonTableColumns(cxt, jtc->columns);
}
else
{
registerJsonTableColumn(cxt, jtc->name);
}
}
}
/* Generate a new unique JSON_TABLE path name. */
static char *
generateJsonTablePathName(JsonTableContext *cxt)
{
char namebuf[32];
char *name = namebuf;
do
{
snprintf(namebuf, sizeof(namebuf), "json_table_path_%d",
++cxt->pathNameId);
} while (isJsonTablePathNameDuplicate(cxt, name));
name = pstrdup(name);
cxt->pathNames = lappend(cxt->pathNames, name);
return name;
}
/* Collect sibling path names from plan to the specified list. */
static void
collectSiblingPathsInJsonTablePlan(JsonTablePlan *plan, List **paths)
{
if (plan->plan_type == JSTP_SIMPLE)
*paths = lappend(*paths, plan->pathname);
else if (plan->plan_type == JSTP_JOINED)
{
if (plan->join_type == JSTPJ_INNER ||
plan->join_type == JSTPJ_OUTER)
{
Assert(plan->plan1->plan_type == JSTP_SIMPLE);
*paths = lappend(*paths, plan->plan1->pathname);
}
else if (plan->join_type == JSTPJ_CROSS ||
plan->join_type == JSTPJ_UNION)
{
collectSiblingPathsInJsonTablePlan(plan->plan1, paths);
collectSiblingPathsInJsonTablePlan(plan->plan2, paths);
}
else
elog(ERROR, "invalid JSON_TABLE join type %d",
plan->join_type);
}
}
/*
* Validate child JSON_TABLE plan by checking that:
* - all nested columns have path names specified
* - all nested columns have corresponding node in the sibling plan
* - plan does not contain duplicate or extra nodes
*/
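/*
 * Hedged example (assumed syntax, not taken from this commit): for
 *     COLUMNS (..., NESTED PATH '$.a' AS pa COLUMNS (...),
 *                   NESTED PATH '$.b' AS pb COLUMNS (...))
 *     PLAN (root OUTER (pa CROSS pb))
 * the child plan "(pa CROSS pb)" must mention exactly the sibling path
 * names pa and pb, each exactly once, or an error is raised here.
 */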
static void
validateJsonTableChildPlan(ParseState *pstate, JsonTablePlan *plan,
List *columns)
{
ListCell *lc1;
List *siblings = NIL;
int nchildren = 0;
if (plan)
collectSiblingPathsInJsonTablePlan(plan, &siblings);
foreach(lc1, columns)
{
JsonTableColumn *jtc = castNode(JsonTableColumn, lfirst(lc1));
if (jtc->coltype == JTC_NESTED)
{
ListCell *lc2;
bool found = false;
if (!jtc->pathname)
ereport(ERROR,
(errcode(ERRCODE_SYNTAX_ERROR),
errmsg("nested JSON_TABLE columns must contain an explicit AS pathname specification if an explicit PLAN clause is used"),
parser_errposition(pstate, jtc->location)));
/* find nested path name in the list of sibling path names */
foreach(lc2, siblings)
{
if ((found = !strcmp(jtc->pathname, lfirst(lc2))))
break;
}
if (!found)
ereport(ERROR,
(errcode(ERRCODE_SYNTAX_ERROR),
errmsg("invalid JSON_TABLE plan"),
errdetail("Plan node for nested path %s was not found in plan.", jtc->pathname),
parser_errposition(pstate, jtc->location)));
nchildren++;
}
}
if (list_length(siblings) > nchildren)
ereport(ERROR,
(errcode(ERRCODE_SYNTAX_ERROR),
errmsg("invalid JSON_TABLE plan"),
errdetail("Plan node contains some extra or duplicate sibling nodes."),
parser_errposition(pstate, plan ? plan->location : -1)));
}
static JsonTableColumn *
findNestedJsonTableColumn(List *columns, const char *pathname)
{
ListCell *lc;
foreach(lc, columns)
{
JsonTableColumn *jtc = castNode(JsonTableColumn, lfirst(lc));
if (jtc->coltype == JTC_NESTED &&
jtc->pathname &&
!strcmp(jtc->pathname, pathname))
return jtc;
}
return NULL;
}
static Node *
transformNestedJsonTableColumn(JsonTableContext *cxt, JsonTableColumn *jtc,
JsonTablePlan *plan)
{
JsonTableParent *node;
char *pathname = jtc->pathname;
node = transformJsonTableColumns(cxt, plan, jtc->columns, jtc->pathspec,
&pathname, jtc->location);
node->name = pstrdup(pathname);
return (Node *) node;
}
static Node *
makeJsonTableSiblingJoin(bool cross, Node *lnode, Node *rnode)
{
JsonTableSibling *join = makeNode(JsonTableSibling);
join->larg = lnode;
join->rarg = rnode;
join->cross = cross;
return (Node *) join;
}
/*
* Recursively transform child JSON_TABLE plan.
*
* Default plan is transformed into a cross/union join of its nested columns.
* Simple and outer/inner plans are transformed into a JsonTableParent by
* finding and transforming corresponding nested column.
* Sibling plans are recursively transformed into a JsonTableSibling.
*/
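/*
 * Rough illustration (hypothetical input): with no PLAN clause, two
 * nested paths pa and pb are combined as if PLAN (parent OUTER (pa UNION pb))
 * had been written, i.e. into a JsonTableSibling union join; PLAN DEFAULT
 * (CROSS) would combine them as a cross join instead.
 */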
static Node *
transformJsonTableChildPlan(JsonTableContext *cxt, JsonTablePlan *plan,
List *columns)
{
JsonTableColumn *jtc = NULL;
if (!plan || plan->plan_type == JSTP_DEFAULT)
{
/* unspecified or default plan */
Node *res = NULL;
ListCell *lc;
bool cross = plan && (plan->join_type & JSTPJ_CROSS);
/* transform all nested columns into cross/union join */
foreach(lc, columns)
{
JsonTableColumn *col = castNode(JsonTableColumn, lfirst(lc));
Node *node;
if (col->coltype != JTC_NESTED)
continue;
node = transformNestedJsonTableColumn(cxt, col, plan);
/* join transformed node with previous sibling nodes */
res = res ? makeJsonTableSiblingJoin(cross, res, node) : node;
}
return res;
}
else if (plan->plan_type == JSTP_SIMPLE)
{
jtc = findNestedJsonTableColumn(columns, plan->pathname);
}
else if (plan->plan_type == JSTP_JOINED)
{
if (plan->join_type == JSTPJ_INNER ||
plan->join_type == JSTPJ_OUTER)
{
Assert(plan->plan1->plan_type == JSTP_SIMPLE);
jtc = findNestedJsonTableColumn(columns, plan->plan1->pathname);
}
else
{
Node *node1 = transformJsonTableChildPlan(cxt, plan->plan1,
columns);
Node *node2 = transformJsonTableChildPlan(cxt, plan->plan2,
columns);
return makeJsonTableSiblingJoin(plan->join_type == JSTPJ_CROSS,
node1, node2);
}
}
else
elog(ERROR, "invalid JSON_TABLE plan type %d", plan->plan_type);
if (!jtc)
ereport(ERROR,
(errcode(ERRCODE_SYNTAX_ERROR),
errmsg("invalid JSON_TABLE plan"),
errdetail("Path name was %s not found in nested columns list.",
plan->pathname),
parser_errposition(cxt->pstate, plan->location)));
return transformNestedJsonTableColumn(cxt, jtc, plan);
}
/* Check whether type is json/jsonb, array, or record. */
static bool
typeIsComposite(Oid typid)
{
char typtype;
if (typid == JSONOID ||
typid == JSONBOID ||
typid == RECORDOID ||
type_is_array(typid))
return true;
typtype = get_typtype(typid);
if (typtype == TYPTYPE_COMPOSITE)
return true;
if (typtype == TYPTYPE_DOMAIN)
return typeIsComposite(getBaseType(typid));
return false;
}
/* Append transformed non-nested JSON_TABLE columns to the TableFunc node */
static void
appendJsonTableColumns(JsonTableContext *cxt, List *columns)
{
ListCell *col;
ParseState *pstate = cxt->pstate;
JsonTable *jt = cxt->table;
TableFunc *tf = cxt->tablefunc;
bool errorOnError = jt->on_error &&
jt->on_error->btype == JSON_BEHAVIOR_ERROR;
foreach(col, columns)
{
JsonTableColumn *rawc = castNode(JsonTableColumn, lfirst(col));
Oid typid;
int32 typmod;
Node *colexpr;
if (rawc->name)
{
/* make sure column names are unique */
ListCell *colname;
foreach(colname, tf->colnames)
if (!strcmp(strVal(lfirst(colname)), rawc->name))
ereport(ERROR,
(errcode(ERRCODE_SYNTAX_ERROR),
errmsg("column name \"%s\" is not unique",
rawc->name),
parser_errposition(pstate, rawc->location)));
tf->colnames = lappend(tf->colnames,
makeString(pstrdup(rawc->name)));
}
/*
* Determine the type and typmod for the new column. FOR ORDINALITY
* columns are INTEGER by standard; the others are user-specified.
*/
switch (rawc->coltype)
{
case JTC_FOR_ORDINALITY:
colexpr = NULL;
typid = INT4OID;
typmod = -1;
break;
case JTC_REGULAR:
typenameTypeIdAndMod(pstate, rawc->typeName, &typid, &typmod);
/*
* Use implicit FORMAT JSON for composite types (arrays and
* records)
*/
if (typeIsComposite(typid))
rawc->coltype = JTC_FORMATTED;
else if (rawc->wrapper != JSW_NONE)
ereport(ERROR,
(errcode(ERRCODE_SYNTAX_ERROR),
errmsg("cannot use WITH WRAPPER clause with scalar columns"),
parser_errposition(pstate, rawc->location)));
else if (rawc->omit_quotes)
ereport(ERROR,
(errcode(ERRCODE_SYNTAX_ERROR),
errmsg("cannot use OMIT QUOTES clause with scalar columns"),
parser_errposition(pstate, rawc->location)));
/* FALLTHROUGH */
case JTC_EXISTS:
case JTC_FORMATTED:
{
Node *je;
CaseTestExpr *param = makeNode(CaseTestExpr);
param->collation = InvalidOid;
param->typeId = cxt->contextItemTypid;
param->typeMod = -1;
je = transformJsonTableColumn(rawc, (Node *) param,
NIL, errorOnError);
colexpr = transformExpr(pstate, je, EXPR_KIND_FROM_FUNCTION);
assign_expr_collations(pstate, colexpr);
typid = exprType(colexpr);
typmod = exprTypmod(colexpr);
break;
}
case JTC_NESTED:
continue;
default:
elog(ERROR, "unknown JSON_TABLE column type: %d", rawc->coltype);
break;
}
tf->coltypes = lappend_oid(tf->coltypes, typid);
tf->coltypmods = lappend_int(tf->coltypmods, typmod);
tf->colcollations = lappend_oid(tf->colcollations, get_typcollation(typid));
tf->colvalexprs = lappend(tf->colvalexprs, colexpr);
}
}
/*
* Create transformed JSON_TABLE parent plan node by appending all non-nested
* columns to the TableFunc node and remembering their indices in the
* colvalexprs list.
*/
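/*
 * For instance (hypothetical counts), if colvalexprs already holds two
 * expressions and this parent contributes three more columns, the node
 * gets colMin = 2 and colMax = 4.
 */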
static JsonTableParent *
makeParentJsonTableNode(JsonTableContext *cxt, char *pathSpec, List *columns)
{
JsonTableParent *node = makeNode(JsonTableParent);
node->path = makeConst(JSONPATHOID, -1, InvalidOid, -1,
DirectFunctionCall1(jsonpath_in,
CStringGetDatum(pathSpec)),
false, false);
/* save start of column range */
node->colMin = list_length(cxt->tablefunc->colvalexprs);
appendJsonTableColumns(cxt, columns);
/* save end of column range */
node->colMax = list_length(cxt->tablefunc->colvalexprs) - 1;
node->errorOnError =
cxt->table->on_error &&
cxt->table->on_error->btype == JSON_BEHAVIOR_ERROR;
return node;
}
static JsonTableParent *
transformJsonTableColumns(JsonTableContext *cxt, JsonTablePlan *plan,
List *columns, char *pathSpec, char **pathName,
int location)
{
JsonTableParent *node;
JsonTablePlan *childPlan;
bool defaultPlan = !plan || plan->plan_type == JSTP_DEFAULT;
if (!*pathName)
{
if (cxt->table->plan)
ereport(ERROR,
(errcode(ERRCODE_SYNTAX_ERROR),
errmsg("invalid JSON_TABLE expression"),
errdetail("JSON_TABLE columns must contain "
"explicit AS pathname specification if "
"explicit PLAN clause is used"),
parser_errposition(cxt->pstate, location)));
*pathName = generateJsonTablePathName(cxt);
}
if (defaultPlan)
childPlan = plan;
else
{
/* validate parent and child plans */
JsonTablePlan *parentPlan;
if (plan->plan_type == JSTP_JOINED)
{
if (plan->join_type != JSTPJ_INNER &&
plan->join_type != JSTPJ_OUTER)
ereport(ERROR,
(errcode(ERRCODE_SYNTAX_ERROR),
errmsg("invalid JSON_TABLE plan"),
errdetail("Expected INNER or OUTER JSON_TABLE plan node."),
parser_errposition(cxt->pstate, plan->location)));
parentPlan = plan->plan1;
childPlan = plan->plan2;
Assert(parentPlan->plan_type != JSTP_JOINED);
Assert(parentPlan->pathname);
}
else
{
parentPlan = plan;
childPlan = NULL;
}
if (strcmp(parentPlan->pathname, *pathName))
ereport(ERROR,
(errcode(ERRCODE_SYNTAX_ERROR),
errmsg("invalid JSON_TABLE plan"),
errdetail("Path name mismatch: expected %s but %s is given.",
*pathName, parentPlan->pathname),
parser_errposition(cxt->pstate, plan->location)));
validateJsonTableChildPlan(cxt->pstate, childPlan, columns);
}
/* transform only non-nested columns */
node = makeParentJsonTableNode(cxt, pathSpec, columns);
node->name = pstrdup(*pathName);
if (childPlan || defaultPlan)
{
/* transform recursively nested columns */
node->child = transformJsonTableChildPlan(cxt, childPlan, columns);
if (node->child)
node->outerJoin = !plan || (plan->join_type & JSTPJ_OUTER);
/* else: default plan case, no children found */
}
return node;
}
/*
* transformJsonTable -
* Transform a raw JsonTable into TableFunc.
*
* Transform the document-generating expression, the row-generating expression,
* the column-generating expressions, and the default value expressions.
*/
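/*
 * A minimal usage sketch (assumed syntax from the reverted feature, for
 * orientation only):
 *     SELECT jt.*
 *       FROM my_table,
 *            JSON_TABLE(my_table.js, '$.items[*]'
 *                       COLUMNS (id int PATH '$.id',
 *                                name text PATH '$.name')) AS jt;
 * The document expression (my_table.js), the row path '$.items[*]' and
 * the per-column paths are all transformed here into a single TableFunc.
 */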
ParseNamespaceItem *
transformJsonTable(ParseState *pstate, JsonTable *jt)
{
JsonTableContext cxt;
TableFunc *tf = makeNode(TableFunc);
JsonFuncExpr *jfe = makeNode(JsonFuncExpr);
JsonTablePlan *plan = jt->plan;
JsonCommon *jscommon;
char *rootPathName = jt->common->pathname;
char *rootPath;
bool is_lateral;
cxt.pstate = pstate;
cxt.table = jt;
cxt.tablefunc = tf;
cxt.pathNames = NIL;
cxt.pathNameId = 0;
if (rootPathName)
registerJsonTableColumn(&cxt, rootPathName);
registerAllJsonTableColumns(&cxt, jt->columns);
#if 0 /* XXX it's unclear from the standard whether
* root path name is mandatory or not */
if (plan && plan->plan_type != JSTP_DEFAULT && !rootPathName)
{
/* Assign root path name and create corresponding plan node */
JsonTablePlan *rootNode = makeNode(JsonTablePlan);
JsonTablePlan *rootPlan = (JsonTablePlan *)
makeJsonTableJoinedPlan(JSTPJ_OUTER, (Node *) rootNode,
(Node *) plan, jt->location);
rootPathName = generateJsonTablePathName(&cxt);
rootNode->plan_type = JSTP_SIMPLE;
rootNode->pathname = rootPathName;
plan = rootPlan;
}
#endif
jscommon = copyObject(jt->common);
jscommon->pathspec = makeStringConst(pstrdup("$"), -1);
jfe->op = JSON_TABLE_OP;
jfe->common = jscommon;
jfe->on_error = jt->on_error;
jfe->location = jt->common->location;
/*
* We make lateral_only names of this level visible, whether or not the
* RangeTableFunc is explicitly marked LATERAL. This is needed for SQL
* spec compliance and seems useful on convenience grounds for all
* functions in FROM.
*
* (LATERAL can't nest within a single pstate level, so we don't need
* save/restore logic here.)
*/
Assert(!pstate->p_lateral_active);
pstate->p_lateral_active = true;
tf->functype = TFT_JSON_TABLE;
tf->docexpr = transformExpr(pstate, (Node *) jfe, EXPR_KIND_FROM_FUNCTION);
cxt.contextItemTypid = exprType(tf->docexpr);
if (!IsA(jt->common->pathspec, A_Const) ||
castNode(A_Const, jt->common->pathspec)->val.node.type != T_String)
ereport(ERROR,
(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
errmsg("only string constants supported in JSON_TABLE path specification"),
parser_errposition(pstate,
exprLocation(jt->common->pathspec))));
rootPath = castNode(A_Const, jt->common->pathspec)->val.sval.sval;
tf->plan = (Node *) transformJsonTableColumns(&cxt, plan, jt->columns,
rootPath, &rootPathName,
jt->common->location);
tf->ordinalitycol = -1; /* undefine ordinality column number */
tf->location = jt->location;
pstate->p_lateral_active = false;
/*
* Mark the RTE as LATERAL if the user said LATERAL explicitly, or if
* there are any lateral cross-references in it.
*/
is_lateral = jt->lateral || contain_vars_of_level((Node *) tf, 0);
return addRangeTableEntryForTableFunc(pstate,
tf, jt->alias, is_lateral, true);
}


@@ -2017,7 +2017,7 @@ addRangeTableEntryForTableFunc(ParseState *pstate,
bool inFromCl)
{
RangeTblEntry *rte = makeNode(RangeTblEntry);
char *refname;
char *refname = alias ? alias->aliasname : pstrdup("xmltable");
Alias *eref;
int numaliases;
@@ -2035,8 +2035,7 @@ addRangeTableEntryForTableFunc(ParseState *pstate,
Assert(list_length(tf->coltypmods) == list_length(tf->colnames));
Assert(list_length(tf->colcollations) == list_length(tf->colnames));
refname = alias ? alias->aliasname :
pstrdup(tf->functype == TFT_XMLTABLE ? "xmltable" : "json_table");
refname = alias ? alias->aliasname : pstrdup("xmltable");
rte->rtekind = RTE_TABLEFUNC;
rte->relid = InvalidOid;
@@ -2059,7 +2058,7 @@ addRangeTableEntryForTableFunc(ParseState *pstate,
ereport(ERROR,
(errcode(ERRCODE_INVALID_COLUMN_REFERENCE),
errmsg("%s function has %d columns available but %d columns specified",
tf->functype == TFT_XMLTABLE ? "XMLTABLE" : "JSON_TABLE",
"XMLTABLE",
list_length(tf->colnames), numaliases)));
rte->eref = eref;


@@ -1957,46 +1957,6 @@ FigureColnameInternal(Node *node, char **name)
case T_XmlSerialize:
*name = "xmlserialize";
return 2;
case T_JsonParseExpr:
*name = "json";
return 2;
case T_JsonScalarExpr:
*name = "json_scalar";
return 2;
case T_JsonSerializeExpr:
*name = "json_serialize";
return 2;
case T_JsonObjectConstructor:
*name = "json_object";
return 2;
case T_JsonArrayConstructor:
case T_JsonArrayQueryConstructor:
*name = "json_array";
return 2;
case T_JsonObjectAgg:
*name = "json_objectagg";
return 2;
case T_JsonArrayAgg:
*name = "json_arrayagg";
return 2;
case T_JsonFuncExpr:
/* make SQL/JSON functions act like a regular function */
switch (((JsonFuncExpr *) node)->op)
{
case JSON_QUERY_OP:
*name = "json_query";
return 2;
case JSON_VALUE_OP:
*name = "json_value";
return 2;
case JSON_EXISTS_OP:
*name = "json_exists";
return 2;
case JSON_TABLE_OP:
*name = "json_table";
return 2;
}
break;
default:
break;
}


@@ -150,9 +150,6 @@ base_yylex(YYSTYPE *lvalp, YYLTYPE *llocp, core_yyscan_t yyscanner)
case USCONST:
cur_token_length = strlen(yyextra->core_yy_extra.scanbuf + *llocp);
break;
case WITHOUT:
cur_token_length = 7;
break;
default:
return cur_token;
}
@@ -224,19 +221,6 @@ base_yylex(YYSTYPE *lvalp, YYLTYPE *llocp, core_yyscan_t yyscanner)
case ORDINALITY:
cur_token = WITH_LA;
break;
case UNIQUE:
cur_token = WITH_LA_UNIQUE;
break;
}
break;
case WITHOUT:
/* Replace WITHOUT by WITHOUT_LA if it's followed by TIME */
switch (next_token)
{
case TIME:
cur_token = WITHOUT_LA;
break;
}
break;


@@ -294,10 +294,6 @@ format_type_extended(Oid type_oid, int32 typemod, bits16 flags)
else
buf = pstrdup("character varying");
break;
case JSONOID:
buf = pstrdup("json");
break;
}
if (buf == NULL)


@@ -1045,6 +1045,11 @@ typedef struct NUMProc
*L_currency_symbol;
} NUMProc;
/* Return flags for DCH_from_char() */
#define DCH_DATED 0x01
#define DCH_TIMED 0x02
#define DCH_ZONED 0x04
/* ----------
* Functions
* ----------
@@ -6707,43 +6712,3 @@ float8_to_char(PG_FUNCTION_ARGS)
NUM_TOCHAR_finish;
PG_RETURN_TEXT_P(result);
}
int
datetime_format_flags(const char *fmt_str, bool *have_error)
{
bool incache;
int fmt_len = strlen(fmt_str);
int result;
FormatNode *format;
if (fmt_len > DCH_CACHE_SIZE)
{
/*
* Allocate new memory if format picture is bigger than static cache
* and do not use cache (call parser always)
*/
incache = false;
format = (FormatNode *) palloc((fmt_len + 1) * sizeof(FormatNode));
parse_format(format, fmt_str, DCH_keywords,
DCH_suff, DCH_index, DCH_FLAG, NULL);
}
else
{
/*
* Use cache buffers
*/
DCHCacheEntry *ent = DCH_cache_fetch(fmt_str, false);
incache = true;
format = ent->format;
}
result = DCH_datetime_type(format, have_error);
if (!incache)
pfree(format);
return result;
}


@@ -13,10 +13,7 @@
*/
#include "postgres.h"
#include "access/hash.h"
#include "catalog/pg_proc.h"
#include "catalog/pg_type.h"
#include "common/hashfn.h"
#include "funcapi.h"
#include "libpq/pqformat.h"
#include "miscadmin.h"
@@ -30,41 +27,20 @@
#include "utils/lsyscache.h"
#include "utils/typcache.h"
/* Common context for key uniqueness check */
typedef struct HTAB *JsonUniqueCheckState; /* hash table for key names */
/* Hash entry for JsonUniqueCheckState */
typedef struct JsonUniqueHashEntry
typedef enum /* type categories for datum_to_json */
{
const char *key;
int key_len;
int object_id;
} JsonUniqueHashEntry;
/* Context for key uniqueness check in builder functions */
typedef struct JsonUniqueBuilderState
{
JsonUniqueCheckState check; /* unique check */
StringInfoData skipped_keys; /* skipped keys with NULL values */
MemoryContext mcxt; /* context for saving skipped keys */
} JsonUniqueBuilderState;
/* Element of object stack for key uniqueness check during json parsing */
typedef struct JsonUniqueStackEntry
{
struct JsonUniqueStackEntry *parent;
int object_id;
} JsonUniqueStackEntry;
/* State for key uniqueness check during json parsing */
typedef struct JsonUniqueParsingState
{
JsonLexContext *lex;
JsonUniqueCheckState check;
JsonUniqueStackEntry *stack;
int id_counter;
bool unique;
} JsonUniqueParsingState;
JSONTYPE_NULL, /* null, so we didn't bother to identify */
JSONTYPE_BOOL, /* boolean (built-in types only) */
JSONTYPE_NUMERIC, /* numeric (ditto) */
JSONTYPE_DATE, /* we use special formatting for datetimes */
JSONTYPE_TIMESTAMP,
JSONTYPE_TIMESTAMPTZ,
JSONTYPE_JSON, /* JSON itself (and JSONB) */
JSONTYPE_ARRAY, /* array */
JSONTYPE_COMPOSITE, /* composite */
JSONTYPE_CAST, /* something with an explicit cast to JSON */
JSONTYPE_OTHER /* all else */
} JsonTypeCategory;
typedef struct JsonAggState
{
@@ -73,7 +49,6 @@ typedef struct JsonAggState
Oid key_output_func;
JsonTypeCategory val_category;
Oid val_output_func;
JsonUniqueBuilderState unique_check;
} JsonAggState;
static void composite_to_json(Datum composite, StringInfo result,
@@ -84,6 +59,9 @@ static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
bool use_line_feeds);
static void array_to_json_internal(Datum array, StringInfo result,
bool use_line_feeds);
static void json_categorize_type(Oid typoid,
JsonTypeCategory *tcategory,
Oid *outfuncoid);
static void datum_to_json(Datum val, bool is_null, StringInfo result,
JsonTypeCategory tcategory, Oid outfuncoid,
bool key_scalar);
@@ -162,7 +140,7 @@ json_recv(PG_FUNCTION_ARGS)
* output function OID. If the returned category is JSONTYPE_CAST, we
* return the OID of the type->JSON cast function instead.
*/
void
static void
json_categorize_type(Oid typoid,
JsonTypeCategory *tcategory,
Oid *outfuncoid)
@@ -744,48 +722,6 @@ row_to_json_pretty(PG_FUNCTION_ARGS)
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
Datum
to_json_worker(Datum val, JsonTypeCategory tcategory, Oid outfuncoid)
{
StringInfo result = makeStringInfo();
datum_to_json(val, false, result, tcategory, outfuncoid, false);
return PointerGetDatum(cstring_to_text_with_len(result->data, result->len));
}
bool
to_json_is_immutable(Oid typoid)
{
JsonTypeCategory tcategory;
Oid outfuncoid;
json_categorize_type(typoid, &tcategory, &outfuncoid);
switch (tcategory)
{
case JSONTYPE_BOOL:
case JSONTYPE_JSON:
return true;
case JSONTYPE_DATE:
case JSONTYPE_TIMESTAMP:
case JSONTYPE_TIMESTAMPTZ:
return false;
case JSONTYPE_ARRAY:
return false; /* TODO recurse into elements */
case JSONTYPE_COMPOSITE:
return false; /* TODO recurse into fields */
case JSONTYPE_NUMERIC:
case JSONTYPE_CAST:
default:
return func_volatile(outfuncoid) == PROVOLATILE_IMMUTABLE;
}
}
/*
* SQL function to_json(anyvalue)
*/
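/* e.g. (illustration) SELECT to_json('3 apples'::text) yields "3 apples" */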
@@ -794,6 +730,7 @@ to_json(PG_FUNCTION_ARGS)
{
Datum val = PG_GETARG_DATUM(0);
Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
StringInfo result;
JsonTypeCategory tcategory;
Oid outfuncoid;
@@ -805,7 +742,11 @@ to_json(PG_FUNCTION_ARGS)
json_categorize_type(val_type,
&tcategory, &outfuncoid);
PG_RETURN_DATUM(to_json_worker(val, tcategory, outfuncoid));
result = makeStringInfo();
datum_to_json(val, false, result, tcategory, outfuncoid, false);
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
/*
@@ -813,8 +754,8 @@ to_json(PG_FUNCTION_ARGS)
*
* aggregate input column as a json array value.
*/
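/*
 * Illustration (not from this commit): SELECT json_agg(x) FROM
 * generate_series(1, 3) AS x produces [1, 2, 3].
 */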
static Datum
json_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null)
Datum
json_agg_transfn(PG_FUNCTION_ARGS)
{
MemoryContext aggcontext,
oldcontext;
@@ -854,13 +795,8 @@ json_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null)
else
{
state = (JsonAggState *) PG_GETARG_POINTER(0);
}
if (absent_on_null && PG_ARGISNULL(1))
PG_RETURN_POINTER(state);
if (state->str->len > 1)
appendStringInfoString(state->str, ", ");
}
/* fast path for NULLs */
if (PG_ARGISNULL(1))
@@ -873,7 +809,7 @@ json_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null)
val = PG_GETARG_DATUM(1);
/* add some whitespace if structured type and not first item */
if (!PG_ARGISNULL(0) && state->str->len > 1 &&
if (!PG_ARGISNULL(0) &&
(state->val_category == JSONTYPE_ARRAY ||
state->val_category == JSONTYPE_COMPOSITE))
{
@@ -891,25 +827,6 @@ json_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null)
PG_RETURN_POINTER(state);
}
/*
* json_agg aggregate function
*/
Datum
json_agg_transfn(PG_FUNCTION_ARGS)
{
return json_agg_transfn_worker(fcinfo, false);
}
/*
* json_agg_strict aggregate function
*/
Datum
json_agg_strict_transfn(PG_FUNCTION_ARGS)
{
return json_agg_transfn_worker(fcinfo, true);
}
/*
* json_agg final function
*/
@@ -933,108 +850,18 @@ json_agg_finalfn(PG_FUNCTION_ARGS)
PG_RETURN_TEXT_P(catenate_stringinfo_string(state->str, "]"));
}
/* Functions implementing hash table for key uniqueness check */
static uint32
json_unique_hash(const void *key, Size keysize)
{
const JsonUniqueHashEntry *entry = (JsonUniqueHashEntry *) key;
uint32 hash = hash_bytes_uint32(entry->object_id);
hash ^= hash_bytes((const unsigned char *) entry->key, entry->key_len);
return DatumGetUInt32(hash);
}
static int
json_unique_hash_match(const void *key1, const void *key2, Size keysize)
{
const JsonUniqueHashEntry *entry1 = (const JsonUniqueHashEntry *) key1;
const JsonUniqueHashEntry *entry2 = (const JsonUniqueHashEntry *) key2;
if (entry1->object_id != entry2->object_id)
return entry1->object_id > entry2->object_id ? 1 : -1;
if (entry1->key_len != entry2->key_len)
return entry1->key_len > entry2->key_len ? 1 : -1;
return strncmp(entry1->key, entry2->key, entry1->key_len);
}
/* Functions implementing object key uniqueness check */
static void
json_unique_check_init(JsonUniqueCheckState *cxt)
{
HASHCTL ctl;
memset(&ctl, 0, sizeof(ctl));
ctl.keysize = sizeof(JsonUniqueHashEntry);
ctl.entrysize = sizeof(JsonUniqueHashEntry);
ctl.hcxt = CurrentMemoryContext;
ctl.hash = json_unique_hash;
ctl.match = json_unique_hash_match;
*cxt = hash_create("json object hashtable",
32,
&ctl,
HASH_ELEM | HASH_CONTEXT | HASH_FUNCTION | HASH_COMPARE);
}
static bool
json_unique_check_key(JsonUniqueCheckState *cxt, const char *key, int object_id)
{
JsonUniqueHashEntry entry;
bool found;
entry.key = key;
entry.key_len = strlen(key);
entry.object_id = object_id;
(void) hash_search(*cxt, &entry, HASH_ENTER, &found);
return !found;
}
static void
json_unique_builder_init(JsonUniqueBuilderState *cxt)
{
json_unique_check_init(&cxt->check);
cxt->mcxt = CurrentMemoryContext;
cxt->skipped_keys.data = NULL;
}
/* On-demand initialization of skipped_keys StringInfo structure */
static StringInfo
json_unique_builder_get_skipped_keys(JsonUniqueBuilderState *cxt)
{
StringInfo out = &cxt->skipped_keys;
if (!out->data)
{
MemoryContext oldcxt = MemoryContextSwitchTo(cxt->mcxt);
initStringInfo(out);
MemoryContextSwitchTo(oldcxt);
}
return out;
}
/*
* json_object_agg transition function.
*
* aggregate two input columns as a single json object value.
*/
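/*
 * Illustration (hypothetical data): SELECT json_object_agg(k, v) FROM
 * (VALUES ('a', 1), ('b', 2)) AS t(k, v) produces { "a" : 1, "b" : 2 }.
 */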
static Datum
json_object_agg_transfn_worker(FunctionCallInfo fcinfo,
bool absent_on_null, bool unique_keys)
Datum
json_object_agg_transfn(PG_FUNCTION_ARGS)
{
MemoryContext aggcontext,
oldcontext;
JsonAggState *state;
StringInfo out;
Datum arg;
bool skip;
int key_offset;
if (!AggCheckCallContext(fcinfo, &aggcontext))
{
@@ -1055,10 +882,6 @@ json_object_agg_transfn_worker(FunctionCallInfo fcinfo,
oldcontext = MemoryContextSwitchTo(aggcontext);
state = (JsonAggState *) palloc(sizeof(JsonAggState));
state->str = makeStringInfo();
if (unique_keys)
json_unique_builder_init(&state->unique_check);
else
memset(&state->unique_check, 0, sizeof(state->unique_check));
MemoryContextSwitchTo(oldcontext);
arg_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
@@ -1086,6 +909,7 @@ json_object_agg_transfn_worker(FunctionCallInfo fcinfo,
else
{
state = (JsonAggState *) PG_GETARG_POINTER(0);
appendStringInfoString(state->str, ", ");
}
/*
@@ -1101,49 +925,11 @@ json_object_agg_transfn_worker(FunctionCallInfo fcinfo,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("field name must not be null")));
/* Skip null values if absent_on_null */
skip = absent_on_null && PG_ARGISNULL(2);
if (skip)
{
/* If key uniqueness check is needed we must save skipped keys */
if (!unique_keys)
PG_RETURN_POINTER(state);
out = json_unique_builder_get_skipped_keys(&state->unique_check);
}
else
{
out = state->str;
/*
* Append comma delimiter only if we have already outputted some
* fields after the initial string "{ ".
*/
if (out->len > 2)
appendStringInfoString(out, ", ");
}
arg = PG_GETARG_DATUM(1);
key_offset = out->len;
datum_to_json(arg, false, out, state->key_category,
datum_to_json(arg, false, state->str, state->key_category,
state->key_output_func, true);
if (unique_keys)
{
const char *key = &out->data[key_offset];
if (!json_unique_check_key(&state->unique_check.check, key, 0))
ereport(ERROR,
(errcode(ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE),
errmsg("duplicate JSON key %s", key)));
if (skip)
PG_RETURN_POINTER(state);
}
appendStringInfoString(state->str, " : ");
if (PG_ARGISNULL(2))
@@ -1157,42 +943,6 @@ json_object_agg_transfn_worker(FunctionCallInfo fcinfo,
PG_RETURN_POINTER(state);
}
/*
* json_object_agg aggregate function
*/
Datum
json_object_agg_transfn(PG_FUNCTION_ARGS)
{
return json_object_agg_transfn_worker(fcinfo, false, false);
}
/*
* json_object_agg_strict aggregate function
*/
Datum
json_object_agg_strict_transfn(PG_FUNCTION_ARGS)
{
return json_object_agg_transfn_worker(fcinfo, true, false);
}
/*
* json_object_agg_unique aggregate function
*/
Datum
json_object_agg_unique_transfn(PG_FUNCTION_ARGS)
{
return json_object_agg_transfn_worker(fcinfo, false, true);
}
/*
* json_object_agg_unique_strict aggregate function
*/
Datum
json_object_agg_unique_strict_transfn(PG_FUNCTION_ARGS)
{
return json_object_agg_transfn_worker(fcinfo, true, true);
}
/*
* json_object_agg final function.
*/
@@ -1234,14 +984,25 @@ catenate_stringinfo_string(StringInfo buffer, const char *addon)
return result;
}
/*
* SQL function json_build_object(variadic "any")
*/
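/* e.g. (illustration) json_build_object('a', 1, 'b', 'x') yields {"a" : 1, "b" : "x"} */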
Datum
json_build_object_worker(int nargs, Datum *args, bool *nulls, Oid *types,
bool absent_on_null, bool unique_keys)
json_build_object(PG_FUNCTION_ARGS)
{
int nargs;
int i;
const char *sep = "";
StringInfo result;
JsonUniqueBuilderState unique_check;
Datum *args;
bool *nulls;
Oid *types;
/* fetch argument values to build the object */
nargs = extract_variadic_args(fcinfo, 0, false, &args, &types, &nulls);
if (nargs < 0)
PG_RETURN_NULL();
if (nargs % 2 != 0)
ereport(ERROR,
@@ -1255,32 +1016,10 @@ json_build_object_worker(int nargs, Datum *args, bool *nulls, Oid *types,
appendStringInfoChar(result, '{');
if (unique_keys)
json_unique_builder_init(&unique_check);
for (i = 0; i < nargs; i += 2)
{
StringInfo out;
bool skip;
int key_offset;
/* Skip null values if absent_on_null */
skip = absent_on_null && nulls[i + 1];
if (skip)
{
/* If key uniqueness check is needed we must save skipped keys */
if (!unique_keys)
continue;
out = json_unique_builder_get_skipped_keys(&unique_check);
}
else
{
appendStringInfoString(result, sep);
sep = ", ";
out = result;
}
appendStringInfoString(result, sep);
sep = ", ";
/* process key */
if (nulls[i])
@@ -1289,24 +1028,7 @@ json_build_object_worker(int nargs, Datum *args, bool *nulls, Oid *types,
errmsg("argument %d cannot be null", i + 1),
errhint("Object keys should be text.")));
/* save key offset before key appending */
key_offset = out->len;
add_json(args[i], false, out, types[i], true);
if (unique_keys)
{
/* check key uniqueness after key appending */
const char *key = &out->data[key_offset];
if (!json_unique_check_key(&unique_check.check, key, 0))
ereport(ERROR,
(errcode(ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE),
errmsg("duplicate JSON key %s", key)));
if (skip)
continue;
}
add_json(args[i], false, result, types[i], true);
appendStringInfoString(result, " : ");
@@ -1316,27 +1038,7 @@ json_build_object_worker(int nargs, Datum *args, bool *nulls, Oid *types,
appendStringInfoChar(result, '}');
return PointerGetDatum(cstring_to_text_with_len(result->data, result->len));
}
/*
* SQL function json_build_object(variadic "any")
*/
Datum
json_build_object(PG_FUNCTION_ARGS)
{
Datum *args;
bool *nulls;
Oid *types;
/* build argument values to build the object */
int nargs = extract_variadic_args(fcinfo, 0, true,
&args, &types, &nulls);
if (nargs < 0)
PG_RETURN_NULL();
PG_RETURN_DATUM(json_build_object_worker(nargs, args, nulls, types, false, false));
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
/*
@@ -1348,13 +1050,25 @@ json_build_object_noargs(PG_FUNCTION_ARGS)
PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2));
}
/*
* SQL function json_build_array(variadic "any")
*/
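/* e.g. (illustration) json_build_array(1, 'x', NULL) yields [1, "x", null] */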
Datum
json_build_array_worker(int nargs, Datum *args, bool *nulls, Oid *types,
bool absent_on_null)
json_build_array(PG_FUNCTION_ARGS)
{
int nargs;
int i;
const char *sep = "";
StringInfo result;
Datum *args;
bool *nulls;
Oid *types;
/* fetch argument values to build the array */
nargs = extract_variadic_args(fcinfo, 0, false, &args, &types, &nulls);
if (nargs < 0)
PG_RETURN_NULL();
result = makeStringInfo();
@@ -1362,9 +1076,6 @@ json_build_array_worker(int nargs, Datum *args, bool *nulls, Oid *types,
for (i = 0; i < nargs; i++)
{
if (absent_on_null && nulls[i])
continue;
appendStringInfoString(result, sep);
sep = ", ";
add_json(args[i], nulls[i], result, types[i], false);
@@ -1372,27 +1083,7 @@ json_build_array_worker(int nargs, Datum *args, bool *nulls, Oid *types,
appendStringInfoChar(result, ']');
return PointerGetDatum(cstring_to_text_with_len(result->data, result->len));
}
/*
* SQL function json_build_array(variadic "any")
*/
Datum
json_build_array(PG_FUNCTION_ARGS)
{
Datum *args;
bool *nulls;
Oid *types;
/* build argument values to build the array */
int nargs = extract_variadic_args(fcinfo, 0, true,
&args, &types, &nulls);
if (nargs < 0)
PG_RETURN_NULL();
PG_RETURN_DATUM(json_build_array_worker(nargs, args, nulls, types, false));
PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
}
/*
@@ -1618,106 +1309,6 @@ escape_json(StringInfo buf, const char *str)
appendStringInfoCharMacro(buf, '"');
}
/* Semantic actions for key uniqueness check */
static void
json_unique_object_start(void *_state)
{
JsonUniqueParsingState *state = _state;
JsonUniqueStackEntry *entry;
if (!state->unique)
return;
/* push object entry to stack */
entry = palloc(sizeof(*entry));
entry->object_id = state->id_counter++;
entry->parent = state->stack;
state->stack = entry;
}
static void
json_unique_object_end(void *_state)
{
JsonUniqueParsingState *state = _state;
JsonUniqueStackEntry *entry;
if (!state->unique)
return;
entry = state->stack;
state->stack = entry->parent; /* pop object from stack */
pfree(entry);
}
static void
json_unique_object_field_start(void *_state, char *field, bool isnull)
{
JsonUniqueParsingState *state = _state;
JsonUniqueStackEntry *entry;
if (!state->unique)
return;
/* find key collision in the current object */
if (json_unique_check_key(&state->check, field, state->stack->object_id))
return;
state->unique = false;
/* pop all objects entries */
while ((entry = state->stack))
{
state->stack = entry->parent;
pfree(entry);
}
}
/* Validate JSON text and additionally check key uniqueness */
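/*
 * Illustration (hypothetical input): with check_unique_keys = true, the
 * text '{"a": 1, "a": 2}' is rejected (or reported as invalid) because
 * the same key appears twice in one object.
 */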
bool
json_validate(text *json, bool check_unique_keys, bool throw_error)
{
JsonLexContext *lex = makeJsonLexContext(json, check_unique_keys);
JsonSemAction uniqueSemAction = {0};
JsonUniqueParsingState state;
JsonParseErrorType result;
if (check_unique_keys)
{
state.lex = lex;
state.stack = NULL;
state.id_counter = 0;
state.unique = true;
json_unique_check_init(&state.check);
uniqueSemAction.semstate = &state;
uniqueSemAction.object_start = json_unique_object_start;
uniqueSemAction.object_field_start = json_unique_object_field_start;
uniqueSemAction.object_end = json_unique_object_end;
}
result = pg_parse_json(lex, check_unique_keys ? &uniqueSemAction : &nullSemAction);
if (result != JSON_SUCCESS)
{
if (throw_error)
json_ereport_error(result, lex);
return false; /* invalid json */
}
if (check_unique_keys && !state.unique)
{
if (throw_error)
ereport(ERROR,
(errcode(ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE),
errmsg("duplicate JSON object key value")));
return false; /* not unique keys */
}
return true; /* ok */
}
/*
* SQL function json_typeof(json) -> text
*
@@ -1733,13 +1324,21 @@ json_validate(text *json, bool check_unique_keys, bool throw_error)
Datum
json_typeof(PG_FUNCTION_ARGS)
{
text *json = PG_GETARG_TEXT_PP(0);
char *type;
text *json;
JsonLexContext *lex;
JsonTokenType tok;
char *type;
JsonParseErrorType result;
json = PG_GETARG_TEXT_PP(0);
lex = makeJsonLexContext(json, false);
/* Lex exactly one token from the input and check its type. */
tok = json_get_first_token(json, true);
result = json_lex(lex);
if (result != JSON_SUCCESS)
json_ereport_error(result, lex);
tok = lex->token_type;
switch (tok)
{
case JSON_TOKEN_OBJECT_START:


@@ -14,7 +14,6 @@
#include "access/htup_details.h"
#include "access/transam.h"
#include "catalog/pg_proc.h"
#include "catalog/pg_type.h"
#include "funcapi.h"
#include "libpq/pqformat.h"
@@ -34,9 +33,25 @@ typedef struct JsonbInState
{
JsonbParseState *parseState;
JsonbValue *res;
bool unique_keys;
} JsonbInState;
/* unlike with json categories, we need to treat json and jsonb differently */
typedef enum /* type categories for datum_to_jsonb */
{
JSONBTYPE_NULL, /* null, so we didn't bother to identify */
JSONBTYPE_BOOL, /* boolean (built-in types only) */
JSONBTYPE_NUMERIC, /* numeric (ditto) */
JSONBTYPE_DATE, /* we use special formatting for datetimes */
JSONBTYPE_TIMESTAMP, /* we use special formatting for timestamp */
JSONBTYPE_TIMESTAMPTZ, /* ... and timestamptz */
JSONBTYPE_JSON, /* JSON */
JSONBTYPE_JSONB, /* JSONB */
JSONBTYPE_ARRAY, /* array */
JSONBTYPE_COMPOSITE, /* composite */
JSONBTYPE_JSONCAST, /* something with an explicit cast to JSON */
JSONBTYPE_OTHER /* all else */
} JsonbTypeCategory;
typedef struct JsonbAggState
{
JsonbInState *res;
@@ -46,7 +61,7 @@ typedef struct JsonbAggState
Oid val_output_func;
} JsonbAggState;
static inline Datum jsonb_from_cstring(char *json, int len, bool unique_keys);
static inline Datum jsonb_from_cstring(char *json, int len);
static size_t checkStringLen(size_t len);
static void jsonb_in_object_start(void *pstate);
static void jsonb_in_object_end(void *pstate);
@@ -55,11 +70,17 @@ static void jsonb_in_array_end(void *pstate);
static void jsonb_in_object_field_start(void *pstate, char *fname, bool isnull);
static void jsonb_put_escaped_value(StringInfo out, JsonbValue *scalarVal);
static void jsonb_in_scalar(void *pstate, char *token, JsonTokenType tokentype);
static void jsonb_categorize_type(Oid typoid,
JsonbTypeCategory *tcategory,
Oid *outfuncoid);
static void composite_to_jsonb(Datum composite, JsonbInState *result);
static void array_dim_to_jsonb(JsonbInState *result, int dim, int ndims, int *dims,
Datum *vals, bool *nulls, int *valcount,
JsonbTypeCategory tcategory, Oid outfuncoid);
static void array_to_jsonb_internal(Datum array, JsonbInState *result);
static void jsonb_categorize_type(Oid typoid,
JsonbTypeCategory *tcategory,
Oid *outfuncoid);
static void datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
JsonbTypeCategory tcategory, Oid outfuncoid,
bool key_scalar);
@@ -77,7 +98,7 @@ jsonb_in(PG_FUNCTION_ARGS)
{
char *json = PG_GETARG_CSTRING(0);
return jsonb_from_cstring(json, strlen(json), false);
return jsonb_from_cstring(json, strlen(json));
}
/*
@@ -101,7 +122,7 @@ jsonb_recv(PG_FUNCTION_ARGS)
else
elog(ERROR, "unsupported jsonb version number %d", version);
return jsonb_from_cstring(str, nbytes, false);
return jsonb_from_cstring(str, nbytes);
}
/*
@@ -142,14 +163,6 @@ jsonb_send(PG_FUNCTION_ARGS)
PG_RETURN_BYTEA_P(pq_endtypsend(&buf));
}
Datum
jsonb_from_text(text *js, bool unique_keys)
{
return jsonb_from_cstring(VARDATA_ANY(js),
VARSIZE_ANY_EXHDR(js),
unique_keys);
}
/*
* Get the type name of a jsonb container.
*/
@@ -240,7 +253,7 @@ jsonb_typeof(PG_FUNCTION_ARGS)
* Uses the json parser (with hooks) to construct a jsonb.
*/
static inline Datum
jsonb_from_cstring(char *json, int len, bool unique_keys)
jsonb_from_cstring(char *json, int len)
{
JsonLexContext *lex;
JsonbInState state;
@@ -250,8 +263,6 @@ jsonb_from_cstring(char *json, int len, bool unique_keys)
memset(&sem, 0, sizeof(sem));
lex = makeJsonLexContextCstringLen(json, len, GetDatabaseEncoding(), true);
state.unique_keys = unique_keys;
sem.semstate = (void *) &state;
sem.object_start = jsonb_in_object_start;
@@ -286,7 +297,6 @@ jsonb_in_object_start(void *pstate)
JsonbInState *_state = (JsonbInState *) pstate;
_state->res = pushJsonbValue(&_state->parseState, WJB_BEGIN_OBJECT, NULL);
_state->parseState->unique_keys = _state->unique_keys;
}
static void
@@ -609,7 +619,7 @@ add_indent(StringInfo out, bool indent, int level)
* output function OID. If the returned category is JSONBTYPE_JSONCAST,
* we return the OID of the relevant cast function instead.
*/
void
static void
jsonb_categorize_type(Oid typoid,
JsonbTypeCategory *tcategory,
Oid *outfuncoid)
@@ -1115,51 +1125,6 @@ add_jsonb(Datum val, bool is_null, JsonbInState *result,
datum_to_jsonb(val, is_null, result, tcategory, outfuncoid, key_scalar);
}
Datum
to_jsonb_worker(Datum val, JsonbTypeCategory tcategory, Oid outfuncoid)
{
JsonbInState result;
memset(&result, 0, sizeof(JsonbInState));
datum_to_jsonb(val, false, &result, tcategory, outfuncoid, false);
return JsonbPGetDatum(JsonbValueToJsonb(result.res));
}
bool
to_jsonb_is_immutable(Oid typoid)
{
JsonbTypeCategory tcategory;
Oid outfuncoid;
jsonb_categorize_type(typoid, &tcategory, &outfuncoid);
switch (tcategory)
{
case JSONBTYPE_BOOL:
case JSONBTYPE_JSON:
case JSONBTYPE_JSONB:
return true;
case JSONBTYPE_DATE:
case JSONBTYPE_TIMESTAMP:
case JSONBTYPE_TIMESTAMPTZ:
return false;
case JSONBTYPE_ARRAY:
return false; /* TODO recurse into elements */
case JSONBTYPE_COMPOSITE:
return false; /* TODO recurse into fields */
case JSONBTYPE_NUMERIC:
case JSONBTYPE_JSONCAST:
default:
return func_volatile(outfuncoid) == PROVOLATILE_IMMUTABLE;
}
}
/*
* SQL function to_jsonb(anyvalue)
*/
@@ -1168,6 +1133,7 @@ to_jsonb(PG_FUNCTION_ARGS)
{
Datum val = PG_GETARG_DATUM(0);
Oid val_type = get_fn_expr_argtype(fcinfo->flinfo, 0);
JsonbInState result;
JsonbTypeCategory tcategory;
Oid outfuncoid;
@@ -1179,15 +1145,31 @@ to_jsonb(PG_FUNCTION_ARGS)
jsonb_categorize_type(val_type,
&tcategory, &outfuncoid);
PG_RETURN_DATUM(to_jsonb_worker(val, tcategory, outfuncoid));
memset(&result, 0, sizeof(JsonbInState));
datum_to_jsonb(val, false, &result, tcategory, outfuncoid, false);
PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
}
/*
* SQL function jsonb_build_object(variadic "any")
*/
Datum
jsonb_build_object_worker(int nargs, Datum *args, bool *nulls, Oid *types,
bool absent_on_null, bool unique_keys)
jsonb_build_object(PG_FUNCTION_ARGS)
{
int nargs;
int i;
JsonbInState result;
Datum *args;
bool *nulls;
Oid *types;
/* build argument values to build the object */
nargs = extract_variadic_args(fcinfo, 0, true, &args, &types, &nulls);
if (nargs < 0)
PG_RETURN_NULL();
if (nargs % 2 != 0)
ereport(ERROR,
@@ -1200,26 +1182,15 @@ jsonb_build_object_worker(int nargs, Datum *args, bool *nulls, Oid *types,
memset(&result, 0, sizeof(JsonbInState));
result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL);
result.parseState->unique_keys = unique_keys;
result.parseState->skip_nulls = absent_on_null;
for (i = 0; i < nargs; i += 2)
{
/* process key */
bool skip;
if (nulls[i])
ereport(ERROR,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("argument %d: key must not be null", i + 1)));
/* skip null values if absent_on_null */
skip = absent_on_null && nulls[i + 1];
/* we need to save skipped keys for the key uniqueness check */
if (skip && !unique_keys)
continue;
add_jsonb(args[i], false, &result, types[i], true);
/* process value */
@@ -1228,27 +1199,7 @@ jsonb_build_object_worker(int nargs, Datum *args, bool *nulls, Oid *types,
result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL);
return JsonbPGetDatum(JsonbValueToJsonb(result.res));
}
/*
* SQL function jsonb_build_object(variadic "any")
*/
Datum
jsonb_build_object(PG_FUNCTION_ARGS)
{
Datum *args;
bool *nulls;
Oid *types;
/* build argument values to build the object */
int nargs = extract_variadic_args(fcinfo, 0, true,
&args, &types, &nulls);
if (nargs < 0)
PG_RETURN_NULL();
PG_RETURN_DATUM(jsonb_build_object_worker(nargs, args, nulls, types, false, false));
PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
}
/*
@@ -1267,50 +1218,36 @@ jsonb_build_object_noargs(PG_FUNCTION_ARGS)
PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
}
Datum
jsonb_build_array_worker(int nargs, Datum *args, bool *nulls, Oid *types,
bool absent_on_null)
{
int i;
JsonbInState result;
memset(&result, 0, sizeof(JsonbInState));
result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
for (i = 0; i < nargs; i++)
{
if (absent_on_null && nulls[i])
continue;
add_jsonb(args[i], nulls[i], &result, types[i], false);
}
result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
return JsonbPGetDatum(JsonbValueToJsonb(result.res));
}
/*
* SQL function jsonb_build_array(variadic "any")
*/
Datum
jsonb_build_array(PG_FUNCTION_ARGS)
{
int nargs;
int i;
JsonbInState result;
Datum *args;
bool *nulls;
Oid *types;
/* build argument values to build the object */
int nargs = extract_variadic_args(fcinfo, 0, true,
&args, &types, &nulls);
/* build argument values to build the array */
nargs = extract_variadic_args(fcinfo, 0, true, &args, &types, &nulls);
if (nargs < 0)
PG_RETURN_NULL();
PG_RETURN_DATUM(jsonb_build_array_worker(nargs, args, nulls, types, false));
}
memset(&result, 0, sizeof(JsonbInState));
result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL);
for (i = 0; i < nargs; i++)
add_jsonb(args[i], nulls[i], &result, types[i], false);
result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL);
PG_RETURN_POINTER(JsonbValueToJsonb(result.res));
}
/*
* degenerate case of jsonb_build_array where it gets 0 arguments.
@@ -1545,8 +1482,6 @@ clone_parse_state(JsonbParseState *state)
{
ocursor->contVal = icursor->contVal;
ocursor->size = icursor->size;
ocursor->unique_keys = icursor->unique_keys;
ocursor->skip_nulls = icursor->skip_nulls;
icursor = icursor->next;
if (icursor == NULL)
break;
@@ -1558,8 +1493,12 @@ clone_parse_state(JsonbParseState *state)
return result;
}
static Datum
jsonb_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null)
/*
* jsonb_agg aggregate function
*/
Datum
jsonb_agg_transfn(PG_FUNCTION_ARGS)
{
MemoryContext oldcontext,
aggcontext;
@@ -1607,9 +1546,6 @@ jsonb_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null)
result = state->res;
}
if (absent_on_null && PG_ARGISNULL(1))
PG_RETURN_POINTER(state);
/* turn the argument into jsonb in the normal function context */
val = PG_ARGISNULL(1) ? (Datum) 0 : PG_GETARG_DATUM(1);
@@ -1679,24 +1615,6 @@ jsonb_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null)
PG_RETURN_POINTER(state);
}
/*
* jsonb_agg aggregate function
*/
Datum
jsonb_agg_transfn(PG_FUNCTION_ARGS)
{
return jsonb_agg_transfn_worker(fcinfo, false);
}
/*
* jsonb_agg_strict aggregate function
*/
Datum
jsonb_agg_strict_transfn(PG_FUNCTION_ARGS)
{
return jsonb_agg_transfn_worker(fcinfo, true);
}
Datum
jsonb_agg_finalfn(PG_FUNCTION_ARGS)
{
@@ -1729,9 +1647,11 @@ jsonb_agg_finalfn(PG_FUNCTION_ARGS)
PG_RETURN_POINTER(out);
}
static Datum
jsonb_object_agg_transfn_worker(FunctionCallInfo fcinfo,
bool absent_on_null, bool unique_keys)
/*
* jsonb_object_agg aggregate function
*/
Datum
jsonb_object_agg_transfn(PG_FUNCTION_ARGS)
{
MemoryContext oldcontext,
aggcontext;
@@ -1745,7 +1665,6 @@ jsonb_object_agg_transfn_worker(FunctionCallInfo fcinfo,
*jbval;
JsonbValue v;
JsonbIteratorToken type;
bool skip;
if (!AggCheckCallContext(fcinfo, &aggcontext))
{
@@ -1765,9 +1684,6 @@ jsonb_object_agg_transfn_worker(FunctionCallInfo fcinfo,
state->res = result;
result->res = pushJsonbValue(&result->parseState,
WJB_BEGIN_OBJECT, NULL);
result->parseState->unique_keys = unique_keys;
result->parseState->skip_nulls = absent_on_null;
MemoryContextSwitchTo(oldcontext);
arg_type = get_fn_expr_argtype(fcinfo->flinfo, 1);
@@ -1803,15 +1719,6 @@ jsonb_object_agg_transfn_worker(FunctionCallInfo fcinfo,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("field name must not be null")));
/*
* Skip null values if absent_on_null unless key uniqueness check is
* needed (because we must save keys in this case).
*/
skip = absent_on_null && PG_ARGISNULL(2);
if (skip && !unique_keys)
PG_RETURN_POINTER(state);
val = PG_GETARG_DATUM(1);
memset(&elem, 0, sizeof(JsonbInState));
@@ -1867,16 +1774,6 @@ jsonb_object_agg_transfn_worker(FunctionCallInfo fcinfo,
}
result->res = pushJsonbValue(&result->parseState,
WJB_KEY, &v);
if (skip)
{
v.type = jbvNull;
result->res = pushJsonbValue(&result->parseState,
WJB_VALUE, &v);
MemoryContextSwitchTo(oldcontext);
PG_RETURN_POINTER(state);
}
break;
case WJB_END_ARRAY:
break;
@@ -1949,43 +1846,6 @@ jsonb_object_agg_transfn_worker(FunctionCallInfo fcinfo,
PG_RETURN_POINTER(state);
}
/*
* jsonb_object_agg aggregate function
*/
Datum
jsonb_object_agg_transfn(PG_FUNCTION_ARGS)
{
return jsonb_object_agg_transfn_worker(fcinfo, false, false);
}
/*
* jsonb_object_agg_strict aggregate function
*/
Datum
jsonb_object_agg_strict_transfn(PG_FUNCTION_ARGS)
{
return jsonb_object_agg_transfn_worker(fcinfo, true, false);
}
/*
* jsonb_object_agg_unique aggregate function
*/
Datum
jsonb_object_agg_unique_transfn(PG_FUNCTION_ARGS)
{
return jsonb_object_agg_transfn_worker(fcinfo, false, true);
}
/*
* jsonb_object_agg_unique_strict aggregate function
*/
Datum
jsonb_object_agg_unique_strict_transfn(PG_FUNCTION_ARGS)
{
return jsonb_object_agg_transfn_worker(fcinfo, true, true);
}
Datum
jsonb_object_agg_finalfn(PG_FUNCTION_ARGS)
{
@@ -2217,65 +2077,3 @@ jsonb_float8(PG_FUNCTION_ARGS)
PG_RETURN_DATUM(retValue);
}
/*
* Construct an empty array jsonb.
*/
Jsonb *
JsonbMakeEmptyArray(void)
{
JsonbValue jbv;
jbv.type = jbvArray;
jbv.val.array.elems = NULL;
jbv.val.array.nElems = 0;
jbv.val.array.rawScalar = false;
return JsonbValueToJsonb(&jbv);
}
/*
* Construct an empty object jsonb.
*/
Jsonb *
JsonbMakeEmptyObject(void)
{
JsonbValue jbv;
jbv.type = jbvObject;
jbv.val.object.pairs = NULL;
jbv.val.object.nPairs = 0;
return JsonbValueToJsonb(&jbv);
}
/*
* Convert jsonb to a C-string stripping quotes from scalar strings.
*/
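/*
 * Illustration: '"abc"'::jsonb becomes abc (quotes stripped), while
 * non-scalar input such as '[1, 2]'::jsonb is printed via JsonbToCString.
 */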
char *
JsonbUnquote(Jsonb *jb)
{
if (JB_ROOT_IS_SCALAR(jb))
{
JsonbValue v;
(void) JsonbExtractScalar(&jb->root, &v);
if (v.type == jbvString)
return pnstrdup(v.val.string.val, v.val.string.len);
else if (v.type == jbvBool)
return pstrdup(v.val.boolean ? "true" : "false");
else if (v.type == jbvNumeric)
return DatumGetCString(DirectFunctionCall1(numeric_out,
PointerGetDatum(v.val.numeric)));
else if (v.type == jbvNull)
return pstrdup("null");
else
{
elog(ERROR, "unrecognized jsonb value type %d", v.type);
return NULL;
}
}
else
return JsonbToCString(NULL, &jb->root, VARSIZE(jb));
}


@@ -64,8 +64,7 @@ static int lengthCompareJsonbStringValue(const void *a, const void *b);
static int lengthCompareJsonbString(const char *val1, int len1,
const char *val2, int len2);
static int lengthCompareJsonbPair(const void *a, const void *b, void *arg);
static void uniqueifyJsonbObject(JsonbValue *object, bool unique_keys,
bool skip_nulls);
static void uniqueifyJsonbObject(JsonbValue *object);
static JsonbValue *pushJsonbValueScalar(JsonbParseState **pstate,
JsonbIteratorToken seq,
JsonbValue *scalarVal);
@@ -690,9 +689,7 @@ pushJsonbValueScalar(JsonbParseState **pstate, JsonbIteratorToken seq,
appendElement(*pstate, scalarVal);
break;
case WJB_END_OBJECT:
uniqueifyJsonbObject(&(*pstate)->contVal,
(*pstate)->unique_keys,
(*pstate)->skip_nulls);
uniqueifyJsonbObject(&(*pstate)->contVal);
/* fall through! */
case WJB_END_ARRAY:
/* Steps here common to WJB_END_OBJECT case */
@@ -735,9 +732,6 @@ pushState(JsonbParseState **pstate)
JsonbParseState *ns = palloc(sizeof(JsonbParseState));
ns->next = *pstate;
ns->unique_keys = false;
ns->skip_nulls = false;
return ns;
}
@@ -1942,7 +1936,7 @@ lengthCompareJsonbPair(const void *a, const void *b, void *binequal)
* Sort and unique-ify pairs in JsonbValue object
*/
static void
uniqueifyJsonbObject(JsonbValue *object, bool unique_keys, bool skip_nulls)
uniqueifyJsonbObject(JsonbValue *object)
{
bool hasNonUniq = false;
@@ -1952,32 +1946,15 @@ uniqueifyJsonbObject(JsonbValue *object, bool unique_keys, bool skip_nulls)
qsort_arg(object->val.object.pairs, object->val.object.nPairs, sizeof(JsonbPair),
lengthCompareJsonbPair, &hasNonUniq);
if (hasNonUniq && unique_keys)
ereport(ERROR,
(errcode(ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE),
errmsg("duplicate JSON object key value")));
if (hasNonUniq || skip_nulls)
if (hasNonUniq)
{
JsonbPair *ptr,
*res;
while (skip_nulls && object->val.object.nPairs > 0 &&
object->val.object.pairs->value.type == jbvNull)
{
/* If skip_nulls is true, remove leading items with null */
object->val.object.pairs++;
object->val.object.nPairs--;
}
ptr = object->val.object.pairs + 1;
res = object->val.object.pairs;
JsonbPair *ptr = object->val.object.pairs + 1,
*res = object->val.object.pairs;
while (ptr - object->val.object.pairs < object->val.object.nPairs)
{
/* Avoid copying over duplicate or null */
if (lengthCompareJsonbStringValue(ptr, res) != 0 &&
(!skip_nulls || ptr->value.type != jbvNull))
/* Avoid copying over duplicate */
if (lengthCompareJsonbStringValue(ptr, res) != 0)
{
res++;
if (ptr != res)


@@ -2656,11 +2656,11 @@ populate_array_dim_jsonb(PopulateArrayContext *ctx, /* context */
check_stack_depth();
if (jbv->type != jbvBinary ||
!JsonContainerIsArray(jbc) ||
JsonContainerIsScalar(jbc))
if (jbv->type != jbvBinary || !JsonContainerIsArray(jbc))
populate_array_report_expected_array(ctx, ndim - 1);
Assert(!JsonContainerIsScalar(jbc));
it = JsonbIteratorInit(jbc);
tok = JsonbIteratorNext(&it, &val, true);
@@ -3132,51 +3132,6 @@ populate_record_field(ColumnIOData *col,
}
}
/* recursively populate specified type from a json/jsonb value */
Datum
json_populate_type(Datum json_val, Oid json_type, Oid typid, int32 typmod,
void **cache, MemoryContext mcxt, bool *isnull)
{
JsValue jsv = {0};
JsonbValue jbv;
jsv.is_json = json_type == JSONOID;
if (*isnull)
{
if (jsv.is_json)
jsv.val.json.str = NULL;
else
jsv.val.jsonb = NULL;
}
else if (jsv.is_json)
{
text *json = DatumGetTextPP(json_val);
jsv.val.json.str = VARDATA_ANY(json);
jsv.val.json.len = VARSIZE_ANY_EXHDR(json);
jsv.val.json.type = JSON_TOKEN_INVALID; /* not used in
* populate_composite() */
}
else
{
Jsonb *jsonb = DatumGetJsonbP(json_val);
jsv.val.jsonb = &jbv;
/* fill binary jsonb value pointing to jb */
jbv.type = jbvBinary;
jbv.val.binary.data = &jsonb->root;
jbv.val.binary.len = VARSIZE(jsonb) - VARHDRSZ;
}
if (!*cache)
*cache = MemoryContextAllocZero(mcxt, sizeof(ColumnIOData));
return populate_record_field(*cache, typid, typmod, NULL, mcxt,
PointerGetDatum(NULL), &jsv, isnull);
}
static RecordIOData *
allocate_record_info(MemoryContext mcxt, int ncolumns)
{
@@ -5566,23 +5521,3 @@ transform_string_values_scalar(void *state, char *token, JsonTokenType tokentype
else
appendStringInfoString(_state->strval, token);
}
JsonTokenType
json_get_first_token(text *json, bool throw_error)
{
JsonLexContext *lex;
JsonParseErrorType result;
lex = makeJsonLexContext(json, false);
/* Lex exactly one token from the input and check its type. */
result = json_lex(lex);
if (result == JSON_SUCCESS)
return lex->token_type;
if (throw_error)
json_ereport_error(result, lex);
return JSON_TOKEN_INVALID; /* invalid json */
}


@@ -67,9 +67,7 @@
#include "lib/stringinfo.h"
#include "libpq/pqformat.h"
#include "miscadmin.h"
#include "nodes/nodeFuncs.h"
#include "utils/builtins.h"
#include "utils/formatting.h"
#include "utils/json.h"
#include "utils/jsonpath.h"
@@ -1079,258 +1077,3 @@ jspGetArraySubscript(JsonPathItem *v, JsonPathItem *from, JsonPathItem *to,
return true;
}
/* SQL/JSON datatype status: */
typedef enum JsonPathDatatypeStatus
{
jpdsNonDateTime, /* null, bool, numeric, string, array, object */
jpdsUnknownDateTime, /* unknown datetime type */
jpdsDateTimeZoned, /* timetz, timestamptz */
jpdsDateTimeNonZoned /* time, timestamp, date */
} JsonPathDatatypeStatus;
/* Context for jspIsMutableWalker() */
typedef struct JsonPathMutableContext
{
List *varnames; /* list of variable names */
List *varexprs; /* list of variable expressions */
JsonPathDatatypeStatus current; /* status of @ item */
bool lax; /* jsonpath is lax or strict */
bool mutable; /* resulting mutability status */
} JsonPathMutableContext;
/*
* Recursive walker for jspIsMutable()
*/
static JsonPathDatatypeStatus
jspIsMutableWalker(JsonPathItem *jpi, JsonPathMutableContext *cxt)
{
JsonPathItem next;
JsonPathDatatypeStatus status = jpdsNonDateTime;
while (!cxt->mutable)
{
JsonPathItem arg;
JsonPathDatatypeStatus leftStatus;
JsonPathDatatypeStatus rightStatus;
switch (jpi->type)
{
case jpiRoot:
Assert(status == jpdsNonDateTime);
break;
case jpiCurrent:
Assert(status == jpdsNonDateTime);
status = cxt->current;
break;
case jpiFilter:
{
JsonPathDatatypeStatus prevStatus = cxt->current;
cxt->current = status;
jspGetArg(jpi, &arg);
jspIsMutableWalker(&arg, cxt);
cxt->current = prevStatus;
break;
}
case jpiVariable:
{
int32 len;
const char *name = jspGetString(jpi, &len);
ListCell *lc1;
ListCell *lc2;
Assert(status == jpdsNonDateTime);
forboth(lc1, cxt->varnames, lc2, cxt->varexprs)
{
String *varname = lfirst_node(String, lc1);
Node *varexpr = lfirst(lc2);
if (strncmp(varname->sval, name, len))
continue;
switch (exprType(varexpr))
{
case DATEOID:
case TIMEOID:
case TIMESTAMPOID:
status = jpdsDateTimeNonZoned;
break;
case TIMETZOID:
case TIMESTAMPTZOID:
status = jpdsDateTimeZoned;
break;
default:
status = jpdsNonDateTime;
break;
}
break;
}
break;
}
case jpiEqual:
case jpiNotEqual:
case jpiLess:
case jpiGreater:
case jpiLessOrEqual:
case jpiGreaterOrEqual:
Assert(status == jpdsNonDateTime);
jspGetLeftArg(jpi, &arg);
leftStatus = jspIsMutableWalker(&arg, cxt);
jspGetRightArg(jpi, &arg);
rightStatus = jspIsMutableWalker(&arg, cxt);
/*
* Comparison of datetime type with different timezone status
* is mutable.
*/
if (leftStatus != jpdsNonDateTime &&
rightStatus != jpdsNonDateTime &&
(leftStatus == jpdsUnknownDateTime ||
rightStatus == jpdsUnknownDateTime ||
leftStatus != rightStatus))
cxt->mutable = true;
break;
case jpiNot:
case jpiIsUnknown:
case jpiExists:
case jpiPlus:
case jpiMinus:
Assert(status == jpdsNonDateTime);
jspGetArg(jpi, &arg);
jspIsMutableWalker(&arg, cxt);
break;
case jpiAnd:
case jpiOr:
case jpiAdd:
case jpiSub:
case jpiMul:
case jpiDiv:
case jpiMod:
case jpiStartsWith:
Assert(status == jpdsNonDateTime);
jspGetLeftArg(jpi, &arg);
jspIsMutableWalker(&arg, cxt);
jspGetRightArg(jpi, &arg);
jspIsMutableWalker(&arg, cxt);
break;
case jpiIndexArray:
for (int i = 0; i < jpi->content.array.nelems; i++)
{
JsonPathItem from;
JsonPathItem to;
if (jspGetArraySubscript(jpi, &from, &to, i))
jspIsMutableWalker(&to, cxt);
jspIsMutableWalker(&from, cxt);
}
/* FALLTHROUGH */
case jpiAnyArray:
if (!cxt->lax)
status = jpdsNonDateTime;
break;
case jpiAny:
if (jpi->content.anybounds.first > 0)
status = jpdsNonDateTime;
break;
case jpiDatetime:
if (jpi->content.arg)
{
char *template;
int flags;
jspGetArg(jpi, &arg);
if (arg.type != jpiString)
{
status = jpdsNonDateTime;
break; /* there will be runtime error */
}
template = jspGetString(&arg, NULL);
flags = datetime_format_flags(template, NULL);
if (flags & DCH_ZONED)
status = jpdsDateTimeZoned;
else
status = jpdsDateTimeNonZoned;
}
else
{
status = jpdsUnknownDateTime;
}
break;
case jpiLikeRegex:
Assert(status == jpdsNonDateTime);
jspInitByBuffer(&arg, jpi->base, jpi->content.like_regex.expr);
jspIsMutableWalker(&arg, cxt);
break;
/* literals */
case jpiNull:
case jpiString:
case jpiNumeric:
case jpiBool:
/* accessors */
case jpiKey:
case jpiAnyKey:
/* special items */
case jpiSubscript:
case jpiLast:
/* item methods */
case jpiType:
case jpiSize:
case jpiAbs:
case jpiFloor:
case jpiCeiling:
case jpiDouble:
case jpiKeyValue:
status = jpdsNonDateTime;
break;
}
if (!jspGetNext(jpi, &next))
break;
jpi = &next;
}
return status;
}
/*
* Check whether jsonpath expression is immutable or not.
*/
bool
jspIsMutable(JsonPath *path, List *varnames, List *varexprs)
{
JsonPathMutableContext cxt;
JsonPathItem jpi;
cxt.varnames = varnames;
cxt.varexprs = varexprs;
cxt.current = jpdsNonDateTime;
cxt.lax = (path->header & JSONPATH_LAX) != 0;
cxt.mutable = false;
jspInit(&jpi, path);
jspIsMutableWalker(&jpi, &cxt);
return cxt.mutable;
}
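The walker above flags exactly the situation where a comparison mixes zoned and non-zoned datetimes, because the outcome then depends on the session TimeZone. The long-standing jsonb_path_query()/jsonb_path_query_tz() pair, which this revert does not touch, splits along the same line; a small illustration (error text abbreviated):

SELECT jsonb_path_query('["2015-08-01 12:00:00-05"]',
                        '$[*] ? (@.datetime() < "2015-08-02 13:00:00".datetime())');
-- ERROR, with a hint to use the *_tz() functions
SELECT jsonb_path_query_tz('["2015-08-01 12:00:00-05"]',
                           '$[*] ? (@.datetime() < "2015-08-02 13:00:00".datetime())');
-- allowed, but the result depends on the current TimeZone setting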

View File

@@ -61,11 +61,9 @@
#include "catalog/pg_collation.h"
#include "catalog/pg_type.h"
#include "executor/execExpr.h"
#include "funcapi.h"
#include "lib/stringinfo.h"
#include "miscadmin.h"
#include "nodes/nodeFuncs.h"
#include "regex/regex.h"
#include "utils/builtins.h"
#include "utils/date.h"
@@ -76,8 +74,6 @@
#include "utils/guc.h"
#include "utils/json.h"
#include "utils/jsonpath.h"
#include "utils/lsyscache.h"
#include "utils/memutils.h"
#include "utils/timestamp.h"
#include "utils/varlena.h"
@@ -90,16 +86,12 @@ typedef struct JsonBaseObjectInfo
int id;
} JsonBaseObjectInfo;
typedef int (*JsonPathVarCallback) (void *vars, char *varName, int varNameLen,
JsonbValue *val, JsonbValue *baseObject);
/*
* Context of jsonpath execution.
*/
typedef struct JsonPathExecContext
{
void *vars; /* variables to substitute into jsonpath */
JsonPathVarCallback getVar;
Jsonb *vars; /* variables to substitute into jsonpath */
JsonbValue *root; /* for $ evaluation */
JsonbValue *current; /* for @ evaluation */
JsonBaseObjectInfo baseObject; /* "base object" for .keyvalue()
@@ -159,59 +151,6 @@ typedef struct JsonValueListIterator
ListCell *next;
} JsonValueListIterator;
/* Structures for JSON_TABLE execution */
typedef struct JsonTableScanState JsonTableScanState;
typedef struct JsonTableJoinState JsonTableJoinState;
struct JsonTableScanState
{
JsonTableScanState *parent;
JsonTableJoinState *nested;
MemoryContext mcxt;
JsonPath *path;
List *args;
JsonValueList found;
JsonValueListIterator iter;
Datum current;
int ordinal;
bool currentIsNull;
bool outerJoin;
bool errorOnError;
bool advanceNested;
bool reset;
};
struct JsonTableJoinState
{
union
{
struct
{
JsonTableJoinState *left;
JsonTableJoinState *right;
bool cross;
bool advanceRight;
} join;
JsonTableScanState scan;
} u;
bool is_join;
};
/* random number to identify JsonTableContext */
#define JSON_TABLE_CONTEXT_MAGIC 418352867
typedef struct JsonTableContext
{
int magic;
struct
{
ExprState *expr;
JsonTableScanState *scan;
} *colexprs;
JsonTableScanState root;
bool empty;
} JsonTableContext;
/* strict/lax flags is decomposed into four [un]wrap/error flags */
#define jspStrictAbsenseOfErrors(cxt) (!(cxt)->laxMode)
#define jspAutoUnwrap(cxt) ((cxt)->laxMode)
@@ -234,8 +173,7 @@ typedef JsonPathBool (*JsonPathPredicateCallback) (JsonPathItem *jsp,
void *param);
typedef Numeric (*BinaryArithmFunc) (Numeric num1, Numeric num2, bool *error);
static JsonPathExecResult executeJsonPath(JsonPath *path, void *vars,
JsonPathVarCallback getVar,
static JsonPathExecResult executeJsonPath(JsonPath *path, Jsonb *vars,
Jsonb *json, bool throwErrors,
JsonValueList *result, bool useTz);
static JsonPathExecResult executeItem(JsonPathExecContext *cxt,
@@ -287,10 +225,7 @@ static JsonPathExecResult appendBoolResult(JsonPathExecContext *cxt,
static void getJsonPathItem(JsonPathExecContext *cxt, JsonPathItem *item,
JsonbValue *value);
static void getJsonPathVariable(JsonPathExecContext *cxt,
JsonPathItem *variable, JsonbValue *value);
static int getJsonPathVariableFromJsonb(void *varsJsonb, char *varName,
int varNameLen, JsonbValue *val,
JsonbValue *baseObject);
JsonPathItem *variable, Jsonb *vars, JsonbValue *value);
static int JsonbArraySize(JsonbValue *jb);
static JsonPathBool executeComparison(JsonPathItem *cmp, JsonbValue *lv,
JsonbValue *rv, void *p);
@@ -302,7 +237,6 @@ static JsonPathExecResult getArrayIndex(JsonPathExecContext *cxt,
JsonPathItem *jsp, JsonbValue *jb, int32 *index);
static JsonBaseObjectInfo setBaseObject(JsonPathExecContext *cxt,
JsonbValue *jbv, int32 id);
static void JsonValueListClear(JsonValueList *jvl);
static void JsonValueListAppend(JsonValueList *jvl, JsonbValue *jbv);
static int JsonValueListLength(const JsonValueList *jvl);
static bool JsonValueListIsEmpty(JsonValueList *jvl);
@@ -320,12 +254,6 @@ static JsonbValue *wrapItemsInArray(const JsonValueList *items);
static int compareDatetime(Datum val1, Oid typid1, Datum val2, Oid typid2,
bool useTz, bool *have_error);
static JsonTableJoinState *JsonTableInitPlanState(JsonTableContext *cxt,
Node *plan, JsonTableScanState *parent);
static bool JsonTableNextRow(JsonTableScanState *scan);
/****************** User interface to JsonPath executor ********************/
/*
@@ -355,8 +283,7 @@ jsonb_path_exists_internal(FunctionCallInfo fcinfo, bool tz)
silent = PG_GETARG_BOOL(3);
}
res = executeJsonPath(jp, vars, getJsonPathVariableFromJsonb,
jb, !silent, NULL, tz);
res = executeJsonPath(jp, vars, jb, !silent, NULL, tz);
PG_FREE_IF_COPY(jb, 0);
PG_FREE_IF_COPY(jp, 1);
@@ -411,8 +338,7 @@ jsonb_path_match_internal(FunctionCallInfo fcinfo, bool tz)
silent = PG_GETARG_BOOL(3);
}
(void) executeJsonPath(jp, vars, getJsonPathVariableFromJsonb,
jb, !silent, &found, tz);
(void) executeJsonPath(jp, vars, jb, !silent, &found, tz);
PG_FREE_IF_COPY(jb, 0);
PG_FREE_IF_COPY(jp, 1);
@@ -490,8 +416,7 @@ jsonb_path_query_internal(FunctionCallInfo fcinfo, bool tz)
vars = PG_GETARG_JSONB_P_COPY(2);
silent = PG_GETARG_BOOL(3);
(void) executeJsonPath(jp, vars, getJsonPathVariableFromJsonb,
jb, !silent, &found, tz);
(void) executeJsonPath(jp, vars, jb, !silent, &found, tz);
funcctx->user_fctx = JsonValueListGetList(&found);
@@ -538,8 +463,7 @@ jsonb_path_query_array_internal(FunctionCallInfo fcinfo, bool tz)
Jsonb *vars = PG_GETARG_JSONB_P(2);
bool silent = PG_GETARG_BOOL(3);
(void) executeJsonPath(jp, vars, getJsonPathVariableFromJsonb,
jb, !silent, &found, tz);
(void) executeJsonPath(jp, vars, jb, !silent, &found, tz);
PG_RETURN_JSONB_P(JsonbValueToJsonb(wrapItemsInArray(&found)));
}
@@ -570,8 +494,7 @@ jsonb_path_query_first_internal(FunctionCallInfo fcinfo, bool tz)
Jsonb *vars = PG_GETARG_JSONB_P(2);
bool silent = PG_GETARG_BOOL(3);
(void) executeJsonPath(jp, vars, getJsonPathVariableFromJsonb,
jb, !silent, &found, tz);
(void) executeJsonPath(jp, vars, jb, !silent, &found, tz);
if (JsonValueListLength(&found) >= 1)
PG_RETURN_JSONB_P(JsonbValueToJsonb(JsonValueListHead(&found)));
@@ -613,9 +536,8 @@ jsonb_path_query_first_tz(PG_FUNCTION_ARGS)
* Otherwise it tries to find all the satisfying result items.
*/
static JsonPathExecResult
executeJsonPath(JsonPath *path, void *vars, JsonPathVarCallback getVar,
Jsonb *json, bool throwErrors, JsonValueList *result,
bool useTz)
executeJsonPath(JsonPath *path, Jsonb *vars, Jsonb *json, bool throwErrors,
JsonValueList *result, bool useTz)
{
JsonPathExecContext cxt;
JsonPathExecResult res;
@@ -627,16 +549,22 @@ executeJsonPath(JsonPath *path, void *vars, JsonPathVarCallback getVar,
if (!JsonbExtractScalar(&json->root, &jbv))
JsonbInitBinary(&jbv, json);
if (vars && !JsonContainerIsObject(&vars->root))
{
ereport(ERROR,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("\"vars\" argument is not an object"),
errdetail("Jsonpath parameters should be encoded as key-value pairs of \"vars\" object.")));
}
cxt.vars = vars;
cxt.getVar = getVar;
cxt.laxMode = (path->header & JSONPATH_LAX) != 0;
cxt.ignoreStructuralErrors = cxt.laxMode;
cxt.root = &jbv;
cxt.current = &jbv;
cxt.baseObject.jbc = NULL;
cxt.baseObject.id = 0;
/* 1 + number of base objects in vars */
cxt.lastGeneratedObjectId = 1 + getVar(vars, NULL, 0, NULL, NULL);
cxt.lastGeneratedObjectId = vars ? 2 : 1;
cxt.innermostArraySize = -1;
cxt.throwErrors = throwErrors;
cxt.useTz = useTz;
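As a reminder of what the check above enforces for the existing jsonb_path_* functions (unchanged by this revert), jsonpath variables are supplied as a single jsonb object keyed by variable name:

SELECT jsonb_path_query('{"a": 1}', '$.a + $x', '{"x": 2}');
-- 3
SELECT jsonb_path_query('{"a": 1}', '$.a + $x', '[2]');
-- ERROR:  "vars" argument is not an object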
@@ -2165,7 +2093,7 @@ getJsonPathItem(JsonPathExecContext *cxt, JsonPathItem *item,
&value->val.string.len);
break;
case jpiVariable:
getJsonPathVariable(cxt, item, value);
getJsonPathVariable(cxt, item, cxt->vars, value);
return;
default:
elog(ERROR, "unexpected jsonpath item type");
@@ -2177,63 +2105,42 @@ getJsonPathItem(JsonPathExecContext *cxt, JsonPathItem *item,
*/
static void
getJsonPathVariable(JsonPathExecContext *cxt, JsonPathItem *variable,
JsonbValue *value)
Jsonb *vars, JsonbValue *value)
{
char *varName;
int varNameLength;
JsonbValue baseObject;
int baseObjectId;
Assert(variable->type == jpiVariable);
varName = jspGetString(variable, &varNameLength);
if (!cxt->vars ||
(baseObjectId = cxt->getVar(cxt->vars, varName, varNameLength, value,
&baseObject)) < 0)
ereport(ERROR,
(errcode(ERRCODE_UNDEFINED_OBJECT),
errmsg("could not find jsonpath variable \"%s\"",
pnstrdup(varName, varNameLength))));
if (baseObjectId > 0)
setBaseObject(cxt, &baseObject, baseObjectId);
}
static int
getJsonPathVariableFromJsonb(void *varsJsonb, char *varName, int varNameLength,
JsonbValue *value, JsonbValue *baseObject)
{
Jsonb *vars = varsJsonb;
JsonbValue tmp;
JsonbValue *v;
if (!varName)
if (!vars)
{
if (vars && !JsonContainerIsObject(&vars->root))
{
ereport(ERROR,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("\"vars\" argument is not an object"),
errdetail("Jsonpath parameters should be encoded as key-value pairs of \"vars\" object.")));
}
return vars ? 1 : 0; /* count of base objects */
value->type = jbvNull;
return;
}
Assert(variable->type == jpiVariable);
varName = jspGetString(variable, &varNameLength);
tmp.type = jbvString;
tmp.val.string.val = varName;
tmp.val.string.len = varNameLength;
v = findJsonbValueFromContainer(&vars->root, JB_FOBJECT, &tmp);
if (!v)
return -1;
if (v)
{
*value = *v;
pfree(v);
}
else
{
ereport(ERROR,
(errcode(ERRCODE_UNDEFINED_OBJECT),
errmsg("could not find jsonpath variable \"%s\"",
pnstrdup(varName, varNameLength))));
}
*value = *v;
pfree(v);
JsonbInitBinary(baseObject, vars);
return 1;
JsonbInitBinary(&tmp, vars);
setBaseObject(cxt, &tmp, 1);
}
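The error branch above fires when a referenced variable has no matching key in that object:

SELECT jsonb_path_query('{"a": 1}', '$.a + $x', '{"y": 2}');
-- ERROR:  could not find jsonpath variable "x"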
/**************** Support functions for JsonPath execution *****************/
@@ -2522,13 +2429,6 @@ setBaseObject(JsonPathExecContext *cxt, JsonbValue *jbv, int32 id)
return baseObject;
}
static void
JsonValueListClear(JsonValueList *jvl)
{
jvl->singleton = NULL;
jvl->list = NULL;
}
static void
JsonValueListAppend(JsonValueList *jvl, JsonbValue *jbv)
{
@@ -2897,667 +2797,3 @@ compareDatetime(Datum val1, Oid typid1, Datum val2, Oid typid2,
return DatumGetInt32(DirectFunctionCall2(cmpfunc, val1, val2));
}
/********************Interface to pgsql's executor***************************/
bool
JsonPathExists(Datum jb, JsonPath *jp, List *vars, bool *error)
{
JsonPathExecResult res = executeJsonPath(jp, vars, EvalJsonPathVar,
DatumGetJsonbP(jb), !error, NULL,
true);
Assert(error || !jperIsError(res));
if (error && jperIsError(res))
*error = true;
return res == jperOk;
}
Datum
JsonPathQuery(Datum jb, JsonPath *jp, JsonWrapper wrapper, bool *empty,
bool *error, List *vars)
{
JsonbValue *first;
bool wrap;
JsonValueList found = {0};
JsonPathExecResult res PG_USED_FOR_ASSERTS_ONLY;
int count;
res = executeJsonPath(jp, vars, EvalJsonPathVar, DatumGetJsonbP(jb), !error,
&found, true);
Assert(error || !jperIsError(res));
if (error && jperIsError(res))
{
*error = true;
*empty = false;
return (Datum) 0;
}
count = JsonValueListLength(&found);
first = count ? JsonValueListHead(&found) : NULL;
if (!first)
wrap = false;
else if (wrapper == JSW_NONE)
wrap = false;
else if (wrapper == JSW_UNCONDITIONAL)
wrap = true;
else if (wrapper == JSW_CONDITIONAL)
wrap = count > 1 ||
IsAJsonbScalar(first) ||
(first->type == jbvBinary &&
JsonContainerIsScalar(first->val.binary.data));
else
{
elog(ERROR, "unrecognized json wrapper %d", wrapper);
wrap = false;
}
if (wrap)
return JsonbPGetDatum(JsonbValueToJsonb(wrapItemsInArray(&found)));
if (count > 1)
{
if (error)
{
*error = true;
return (Datum) 0;
}
ereport(ERROR,
(errcode(ERRCODE_MORE_THAN_ONE_SQL_JSON_ITEM),
errmsg("JSON path expression in JSON_QUERY should return "
"singleton item without wrapper"),
errhint("Use WITH WRAPPER clause to wrap SQL/JSON item "
"sequence into array.")));
}
if (first)
return JsonbPGetDatum(JsonbValueToJsonb(first));
*empty = true;
return PointerGetDatum(NULL);
}
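For readability of the wrapper logic in JsonPathQuery(), here is roughly how it surfaced through the reverted JSON_QUERY syntax (no longer accepted once this commit is applied); outputs are indicative only:

SELECT JSON_QUERY('[1, 2, 3]', '$[*]' WITH WRAPPER);
-- [1, 2, 3]
SELECT JSON_QUERY('[1, 2, 3]', '$[*]');
-- NULL under the default NULL ON ERROR; ERROR ON ERROR reports the
-- "singleton item without wrapper" message above
SELECT JSON_QUERY('{"a": [1]}', '$.a' WITH CONDITIONAL WRAPPER);
-- [1], a single non-scalar item is not wrapped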
JsonbValue *
JsonPathValue(Datum jb, JsonPath *jp, bool *empty, bool *error, List *vars)
{
JsonbValue *res;
JsonValueList found = {0};
JsonPathExecResult jper PG_USED_FOR_ASSERTS_ONLY;
int count;
jper = executeJsonPath(jp, vars, EvalJsonPathVar, DatumGetJsonbP(jb), !error,
&found, true);
Assert(error || !jperIsError(jper));
if (error && jperIsError(jper))
{
*error = true;
*empty = false;
return NULL;
}
count = JsonValueListLength(&found);
*empty = !count;
if (*empty)
return NULL;
if (count > 1)
{
if (error)
{
*error = true;
return NULL;
}
ereport(ERROR,
(errcode(ERRCODE_MORE_THAN_ONE_SQL_JSON_ITEM),
errmsg("JSON path expression in JSON_VALUE should return "
"singleton scalar item")));
}
res = JsonValueListHead(&found);
if (res->type == jbvBinary &&
JsonContainerIsScalar(res->val.binary.data))
JsonbExtractScalar(res->val.binary.data, res);
if (!IsAJsonbScalar(res))
{
if (error)
{
*error = true;
return NULL;
}
ereport(ERROR,
(errcode(ERRCODE_SQL_JSON_SCALAR_REQUIRED),
errmsg("JSON path expression in JSON_VALUE should return "
"singleton scalar item")));
}
if (res->type == jbvNull)
return NULL;
return res;
}
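JsonPathValue() similarly backed the reverted JSON_VALUE function, which insists on a single scalar result; indicative calls in the removed syntax:

SELECT JSON_VALUE('{"a": 1}', '$.a');
-- 1 (returned as text by default)
SELECT JSON_VALUE('[1, 2]', '$[*]' ERROR ON ERROR);
-- ERROR:  JSON path expression in JSON_VALUE should return singleton scalar item
SELECT JSON_VALUE('{"a": [1]}', '$.a' ERROR ON ERROR);
-- same error, the single item is not a scalar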
static void
JsonbValueInitNumericDatum(JsonbValue *jbv, Datum num)
{
jbv->type = jbvNumeric;
jbv->val.numeric = DatumGetNumeric(num);
}
void
JsonItemFromDatum(Datum val, Oid typid, int32 typmod, JsonbValue *res)
{
switch (typid)
{
case BOOLOID:
res->type = jbvBool;
res->val.boolean = DatumGetBool(val);
break;
case NUMERICOID:
JsonbValueInitNumericDatum(res, val);
break;
case INT2OID:
JsonbValueInitNumericDatum(res, DirectFunctionCall1(int2_numeric, val));
break;
case INT4OID:
JsonbValueInitNumericDatum(res, DirectFunctionCall1(int4_numeric, val));
break;
case INT8OID:
JsonbValueInitNumericDatum(res, DirectFunctionCall1(int8_numeric, val));
break;
case FLOAT4OID:
JsonbValueInitNumericDatum(res, DirectFunctionCall1(float4_numeric, val));
break;
case FLOAT8OID:
JsonbValueInitNumericDatum(res, DirectFunctionCall1(float8_numeric, val));
break;
case TEXTOID:
case VARCHAROID:
res->type = jbvString;
res->val.string.val = VARDATA_ANY(val);
res->val.string.len = VARSIZE_ANY_EXHDR(val);
break;
case DATEOID:
case TIMEOID:
case TIMETZOID:
case TIMESTAMPOID:
case TIMESTAMPTZOID:
res->type = jbvDatetime;
res->val.datetime.value = val;
res->val.datetime.typid = typid;
res->val.datetime.typmod = typmod;
res->val.datetime.tz = 0;
break;
case JSONBOID:
{
JsonbValue *jbv = res;
Jsonb *jb = DatumGetJsonbP(val);
if (JsonContainerIsScalar(&jb->root))
{
bool result PG_USED_FOR_ASSERTS_ONLY;
result = JsonbExtractScalar(&jb->root, jbv);
Assert(result);
}
else
JsonbInitBinary(jbv, jb);
break;
}
case JSONOID:
{
text *txt = DatumGetTextP(val);
char *str = text_to_cstring(txt);
Jsonb *jb =
DatumGetJsonbP(DirectFunctionCall1(jsonb_in,
CStringGetDatum(str)));
pfree(str);
JsonItemFromDatum(JsonbPGetDatum(jb), JSONBOID, -1, res);
break;
}
default:
ereport(ERROR,
(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
errmsg("only bool, numeric, and text types could be "
"casted to supported jsonpath types.")));
}
}
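JsonItemFromDatum() is what turned PASSING arguments of the types listed above into jsonpath variables for the reverted query functions; a hedged sketch of how that looked at the SQL level:

SELECT JSON_EXISTS('{"a": 42}', '$.a ? (@ == $val)' PASSING 42 AS val);
-- true
SELECT JSON_QUERY('[1, 2, 3, 4]', '$[*] ? (@ > $low)'
                  PASSING 2 AS low WITH WRAPPER);
-- [3, 4]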
/************************ JSON_TABLE functions ***************************/
/*
* Returns private data from executor state. Ensure validity by check with
* MAGIC number.
*/
static inline JsonTableContext *
GetJsonTableContext(TableFuncScanState *state, const char *fname)
{
JsonTableContext *result;
if (!IsA(state, TableFuncScanState))
elog(ERROR, "%s called with invalid TableFuncScanState", fname);
result = (JsonTableContext *) state->opaque;
if (result->magic != JSON_TABLE_CONTEXT_MAGIC)
elog(ERROR, "%s called with invalid TableFuncScanState", fname);
return result;
}
/* Recursively initialize JSON_TABLE scan state */
static void
JsonTableInitScanState(JsonTableContext *cxt, JsonTableScanState *scan,
JsonTableParent *node, JsonTableScanState *parent,
List *args, MemoryContext mcxt)
{
int i;
scan->parent = parent;
scan->outerJoin = node->outerJoin;
scan->errorOnError = node->errorOnError;
scan->path = DatumGetJsonPathP(node->path->constvalue);
scan->args = args;
scan->mcxt = AllocSetContextCreate(mcxt, "JsonTableContext",
ALLOCSET_DEFAULT_SIZES);
scan->nested = node->child ?
JsonTableInitPlanState(cxt, node->child, scan) : NULL;
scan->current = PointerGetDatum(NULL);
scan->currentIsNull = true;
for (i = node->colMin; i <= node->colMax; i++)
cxt->colexprs[i].scan = scan;
}
/* Recursively initialize JSON_TABLE scan state */
static JsonTableJoinState *
JsonTableInitPlanState(JsonTableContext *cxt, Node *plan,
JsonTableScanState *parent)
{
JsonTableJoinState *state = palloc0(sizeof(*state));
if (IsA(plan, JsonTableSibling))
{
JsonTableSibling *join = castNode(JsonTableSibling, plan);
state->is_join = true;
state->u.join.cross = join->cross;
state->u.join.left = JsonTableInitPlanState(cxt, join->larg, parent);
state->u.join.right = JsonTableInitPlanState(cxt, join->rarg, parent);
}
else
{
JsonTableParent *node = castNode(JsonTableParent, plan);
state->is_join = false;
JsonTableInitScanState(cxt, &state->u.scan, node, parent,
parent->args, parent->mcxt);
}
return state;
}
/*
* JsonTableInitOpaque
* Fill in TableFuncScanState->opaque for JsonTable processor
*/
static void
JsonTableInitOpaque(TableFuncScanState *state, int natts)
{
JsonTableContext *cxt;
PlanState *ps = &state->ss.ps;
TableFuncScan *tfs = castNode(TableFuncScan, ps->plan);
TableFunc *tf = tfs->tablefunc;
JsonExpr *ci = castNode(JsonExpr, tf->docexpr);
JsonTableParent *root = castNode(JsonTableParent, tf->plan);
List *args = NIL;
ListCell *lc;
int i;
cxt = palloc0(sizeof(JsonTableContext));
cxt->magic = JSON_TABLE_CONTEXT_MAGIC;
if (ci->passing_values)
{
ListCell *exprlc;
ListCell *namelc;
forboth(exprlc, ci->passing_values,
namelc, ci->passing_names)
{
Expr *expr = (Expr *) lfirst(exprlc);
String *name = lfirst_node(String, namelc);
JsonPathVariableEvalContext *var = palloc(sizeof(*var));
var->name = pstrdup(name->sval);
var->typid = exprType((Node *) expr);
var->typmod = exprTypmod((Node *) expr);
var->estate = ExecInitExpr(expr, ps);
var->econtext = ps->ps_ExprContext;
var->mcxt = CurrentMemoryContext;
var->evaluated = false;
var->value = (Datum) 0;
var->isnull = true;
args = lappend(args, var);
}
}
cxt->colexprs = palloc(sizeof(*cxt->colexprs) *
list_length(tf->colvalexprs));
JsonTableInitScanState(cxt, &cxt->root, root, NULL, args,
CurrentMemoryContext);
i = 0;
foreach(lc, tf->colvalexprs)
{
Expr *expr = lfirst(lc);
cxt->colexprs[i].expr =
ExecInitExprWithCaseValue(expr, ps,
&cxt->colexprs[i].scan->current,
&cxt->colexprs[i].scan->currentIsNull);
i++;
}
state->opaque = cxt;
}
/* Reset scan iterator to the beginning of the item list */
static void
JsonTableRescan(JsonTableScanState *scan)
{
JsonValueListInitIterator(&scan->found, &scan->iter);
scan->current = PointerGetDatum(NULL);
scan->currentIsNull = true;
scan->advanceNested = false;
scan->ordinal = 0;
}
/* Reset context item of a scan, execute JSON path and reset a scan */
static void
JsonTableResetContextItem(JsonTableScanState *scan, Datum item)
{
MemoryContext oldcxt;
JsonPathExecResult res;
Jsonb *js = (Jsonb *) DatumGetJsonbP(item);
JsonValueListClear(&scan->found);
MemoryContextResetOnly(scan->mcxt);
oldcxt = MemoryContextSwitchTo(scan->mcxt);
res = executeJsonPath(scan->path, scan->args, EvalJsonPathVar, js,
scan->errorOnError, &scan->found, false /* FIXME */ );
MemoryContextSwitchTo(oldcxt);
if (jperIsError(res))
{
Assert(!scan->errorOnError);
JsonValueListClear(&scan->found); /* EMPTY ON ERROR case */
}
JsonTableRescan(scan);
}
/*
* JsonTableSetDocument
* Install the input document
*/
static void
JsonTableSetDocument(TableFuncScanState *state, Datum value)
{
JsonTableContext *cxt = GetJsonTableContext(state, "JsonTableSetDocument");
JsonTableResetContextItem(&cxt->root, value);
}
/* Recursively reset scan and its child nodes */
static void
JsonTableRescanRecursive(JsonTableJoinState *state)
{
if (state->is_join)
{
JsonTableRescanRecursive(state->u.join.left);
JsonTableRescanRecursive(state->u.join.right);
state->u.join.advanceRight = false;
}
else
{
JsonTableRescan(&state->u.scan);
if (state->u.scan.nested)
JsonTableRescanRecursive(state->u.scan.nested);
}
}
/*
* Fetch next row from a cross/union joined scan.
*
* Returns false at the end of a scan, true otherwise.
*/
static bool
JsonTableNextJoinRow(JsonTableJoinState *state)
{
if (!state->is_join)
return JsonTableNextRow(&state->u.scan);
if (state->u.join.advanceRight)
{
/* fetch next inner row */
if (JsonTableNextJoinRow(state->u.join.right))
return true;
/* inner rows are exhausted */
if (state->u.join.cross)
state->u.join.advanceRight = false; /* next outer row */
else
return false; /* end of scan */
}
while (!state->u.join.advanceRight)
{
/* fetch next outer row */
bool left = JsonTableNextJoinRow(state->u.join.left);
if (state->u.join.cross)
{
if (!left)
return false; /* end of scan */
JsonTableRescanRecursive(state->u.join.right);
if (!JsonTableNextJoinRow(state->u.join.right))
continue; /* next outer row */
state->u.join.advanceRight = true; /* next inner row */
}
else if (!left)
{
if (!JsonTableNextJoinRow(state->u.join.right))
return false; /* end of scan */
state->u.join.advanceRight = true; /* next inner row */
}
break;
}
return true;
}
/* Recursively set 'reset' flag of scan and its child nodes */
static void
JsonTableJoinReset(JsonTableJoinState *state)
{
if (state->is_join)
{
JsonTableJoinReset(state->u.join.left);
JsonTableJoinReset(state->u.join.right);
state->u.join.advanceRight = false;
}
else
{
state->u.scan.reset = true;
state->u.scan.advanceNested = false;
if (state->u.scan.nested)
JsonTableJoinReset(state->u.scan.nested);
}
}
/*
* Fetch next row from a simple scan with outer/inner joined nested subscans.
*
* Returns false at the end of a scan, true otherwise.
*/
static bool
JsonTableNextRow(JsonTableScanState *scan)
{
/* reset context item if requested */
if (scan->reset)
{
Assert(!scan->parent->currentIsNull);
JsonTableResetContextItem(scan, scan->parent->current);
scan->reset = false;
}
if (scan->advanceNested)
{
/* fetch next nested row */
scan->advanceNested = JsonTableNextJoinRow(scan->nested);
if (scan->advanceNested)
return true;
}
for (;;)
{
/* fetch next row */
JsonbValue *jbv = JsonValueListNext(&scan->found, &scan->iter);
MemoryContext oldcxt;
if (!jbv)
{
scan->current = PointerGetDatum(NULL);
scan->currentIsNull = true;
return false; /* end of scan */
}
/* set current row item */
oldcxt = MemoryContextSwitchTo(scan->mcxt);
scan->current = JsonbPGetDatum(JsonbValueToJsonb(jbv));
scan->currentIsNull = false;
MemoryContextSwitchTo(oldcxt);
scan->ordinal++;
if (!scan->nested)
break;
JsonTableJoinReset(scan->nested);
scan->advanceNested = JsonTableNextJoinRow(scan->nested);
if (scan->advanceNested || scan->outerJoin)
break;
}
return true;
}
/*
* JsonTableFetchRow
* Prepare the next "current" tuple for upcoming GetValue calls.
* Returns FALSE if the row-filter expression returned no more rows.
*/
static bool
JsonTableFetchRow(TableFuncScanState *state)
{
JsonTableContext *cxt = GetJsonTableContext(state, "JsonTableFetchRow");
if (cxt->empty)
return false;
return JsonTableNextRow(&cxt->root);
}
/*
* JsonTableGetValue
* Return the value for column number 'colnum' for the current row.
*
* This leaks memory, so the context in which it's called should be reset
* frequently.
*/
static Datum
JsonTableGetValue(TableFuncScanState *state, int colnum,
Oid typid, int32 typmod, bool *isnull)
{
JsonTableContext *cxt = GetJsonTableContext(state, "JsonTableGetValue");
ExprContext *econtext = state->ss.ps.ps_ExprContext;
ExprState *estate = cxt->colexprs[colnum].expr;
JsonTableScanState *scan = cxt->colexprs[colnum].scan;
Datum result;
if (scan->currentIsNull) /* NULL from outer/union join */
{
result = (Datum) 0;
*isnull = true;
}
else if (estate) /* regular column */
{
result = ExecEvalExpr(estate, econtext, isnull);
}
else
{
result = Int32GetDatum(scan->ordinal); /* ordinality column */
*isnull = false;
}
return result;
}
/*
* JsonTableDestroyOpaque
*/
static void
JsonTableDestroyOpaque(TableFuncScanState *state)
{
JsonTableContext *cxt = GetJsonTableContext(state, "JsonTableDestroyOpaque");
/* not valid anymore */
cxt->magic = 0;
state->opaque = NULL;
}
const TableFuncRoutine JsonbTableRoutine =
{
JsonTableInitOpaque,
JsonTableSetDocument,
NULL,
NULL,
NULL,
JsonTableFetchRow,
JsonTableGetValue,
JsonTableDestroyOpaque
};
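The JsonbTableRoutine above is the executor half of the reverted JSON_TABLE feature. A minimal invocation in the removed syntax, shown only to make the scan machinery easier to follow:

SELECT jt.*
FROM JSON_TABLE('[{"a": 10}, {"a": 20}]', '$[*]'
                COLUMNS (id FOR ORDINALITY,
                         a  int PATH '$.a')) AS jt;
-- two rows: (1, 10) and (2, 20)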

View File

@@ -466,12 +466,6 @@ static void get_coercion_expr(Node *arg, deparse_context *context,
Node *parentNode);
static void get_const_expr(Const *constval, deparse_context *context,
int showtype);
static void get_json_constructor(JsonConstructorExpr *ctor,
deparse_context *context, bool showimplicit);
static void get_json_agg_constructor(JsonConstructorExpr *ctor,
deparse_context *context,
const char *funcname,
bool is_json_objectagg);
static void get_const_collation(Const *constval, deparse_context *context);
static void simple_quote_literal(StringInfo buf, const char *val);
static void get_sublink_expr(SubLink *sublink, deparse_context *context);
@@ -505,10 +499,6 @@ static char *generate_qualified_type_name(Oid typid);
static text *string_to_text(char *str);
static char *flatten_reloptions(Oid relid);
static void get_reloptions(StringInfo buf, Datum reloptions);
static void get_json_path_spec(Node *path_spec, deparse_context *context,
bool showimplicit);
static void get_json_table_columns(TableFunc *tf, JsonTableParent *node,
deparse_context *context, bool showimplicit);
#define only_marker(rte) ((rte)->inh ? "" : "ONLY ")
@@ -6338,8 +6328,7 @@ get_rule_sortgroupclause(Index ref, List *tlist, bool force_colno,
bool need_paren = (PRETTY_PAREN(context)
|| IsA(expr, FuncExpr)
|| IsA(expr, Aggref)
|| IsA(expr, WindowFunc)
|| IsA(expr, JsonConstructorExpr));
|| IsA(expr, WindowFunc));
if (need_paren)
appendStringInfoChar(context->buf, '(');
@@ -8198,8 +8187,6 @@ isSimpleNode(Node *node, Node *parentNode, int prettyFlags)
case T_GroupingFunc:
case T_WindowFunc:
case T_FuncExpr:
case T_JsonConstructorExpr:
case T_JsonExpr:
/* function-like: name(..) or name[..] */
return true;
@@ -8293,7 +8280,6 @@ isSimpleNode(Node *node, Node *parentNode, int prettyFlags)
case T_NullTest:
case T_BooleanTest:
case T_DistinctExpr:
case T_JsonIsPredicate:
switch (nodeTag(parentNode))
{
case T_FuncExpr:
@@ -8318,7 +8304,6 @@ isSimpleNode(Node *node, Node *parentNode, int prettyFlags)
case T_GroupingFunc: /* own parentheses */
case T_WindowFunc: /* own parentheses */
case T_CaseExpr: /* other separators */
case T_JsonExpr: /* own parentheses */
return true;
default:
return false;
@@ -8375,11 +8360,6 @@ isSimpleNode(Node *node, Node *parentNode, int prettyFlags)
return false;
}
case T_JsonValueExpr:
/* maybe simple, check args */
return isSimpleNode((Node *) ((JsonValueExpr *) node)->raw_expr,
node, prettyFlags);
default:
break;
}
@@ -8486,122 +8466,6 @@ get_rule_expr_paren(Node *node, deparse_context *context,
}
/*
* get_json_path_spec - Parse back a JSON path specification
*/
static void
get_json_path_spec(Node *path_spec, deparse_context *context, bool showimplicit)
{
if (IsA(path_spec, Const))
get_const_expr((Const *) path_spec, context, -1);
else
get_rule_expr(path_spec, context, showimplicit);
}
/*
* get_json_format - Parse back a JsonFormat node
*/
static void
get_json_format(JsonFormat *format, StringInfo buf)
{
if (format->format_type == JS_FORMAT_DEFAULT)
return;
appendStringInfoString(buf,
format->format_type == JS_FORMAT_JSONB ?
" FORMAT JSONB" : " FORMAT JSON");
if (format->encoding != JS_ENC_DEFAULT)
{
const char *encoding =
format->encoding == JS_ENC_UTF16 ? "UTF16" :
format->encoding == JS_ENC_UTF32 ? "UTF32" : "UTF8";
appendStringInfo(buf, " ENCODING %s", encoding);
}
}
/*
* get_json_returning - Parse back a JsonReturning structure
*/
static void
get_json_returning(JsonReturning *returning, StringInfo buf,
bool json_format_by_default)
{
if (!OidIsValid(returning->typid))
return;
appendStringInfo(buf, " RETURNING %s",
format_type_with_typemod(returning->typid,
returning->typmod));
if (!json_format_by_default ||
returning->format->format_type !=
(returning->typid == JSONBOID ? JS_FORMAT_JSONB : JS_FORMAT_JSON))
get_json_format(returning->format, buf);
}
static void
get_json_behavior(JsonBehavior *behavior, deparse_context *context,
const char *on)
{
/*
* The order of array elements must correspond to the order of
* JsonBehaviorType members.
*/
const char *behavior_names[] =
{
" NULL",
" ERROR",
" EMPTY",
" TRUE",
" FALSE",
" UNKNOWN",
" EMPTY ARRAY",
" EMPTY OBJECT",
" DEFAULT "
};
if ((int) behavior->btype < 0 || behavior->btype >= lengthof(behavior_names))
elog(ERROR, "invalid json behavior type: %d", behavior->btype);
appendStringInfoString(context->buf, behavior_names[behavior->btype]);
if (behavior->btype == JSON_BEHAVIOR_DEFAULT)
get_rule_expr(behavior->default_expr, context, false);
appendStringInfo(context->buf, " ON %s", on);
}
/*
* get_json_expr_options
*
* Parse back common options for JSON_QUERY, JSON_VALUE, JSON_EXISTS and
* JSON_TABLE columns.
*/
static void
get_json_expr_options(JsonExpr *jsexpr, deparse_context *context,
JsonBehaviorType default_behavior)
{
if (jsexpr->op == JSON_QUERY_OP)
{
if (jsexpr->wrapper == JSW_CONDITIONAL)
appendStringInfo(context->buf, " WITH CONDITIONAL WRAPPER");
else if (jsexpr->wrapper == JSW_UNCONDITIONAL)
appendStringInfo(context->buf, " WITH UNCONDITIONAL WRAPPER");
if (jsexpr->omit_quotes)
appendStringInfo(context->buf, " OMIT QUOTES");
}
if (jsexpr->op != JSON_EXISTS_OP &&
jsexpr->on_empty->btype != default_behavior)
get_json_behavior(jsexpr->on_empty, context, "EMPTY");
if (jsexpr->on_error->btype != default_behavior)
get_json_behavior(jsexpr->on_error, context, "ERROR");
}
/* ----------
* get_rule_expr - Parse back an expression
*
@@ -9760,116 +9624,6 @@ get_rule_expr(Node *node, deparse_context *context,
}
break;
case T_JsonValueExpr:
{
JsonValueExpr *jve = (JsonValueExpr *) node;
get_rule_expr((Node *) jve->raw_expr, context, false);
get_json_format(jve->format, context->buf);
}
break;
case T_JsonConstructorExpr:
get_json_constructor((JsonConstructorExpr *) node, context, false);
break;
case T_JsonIsPredicate:
{
JsonIsPredicate *pred = (JsonIsPredicate *) node;
if (!PRETTY_PAREN(context))
appendStringInfoChar(context->buf, '(');
get_rule_expr_paren(pred->expr, context, true, node);
appendStringInfoString(context->buf, " IS JSON");
/* TODO: handle FORMAT clause */
switch (pred->item_type)
{
case JS_TYPE_SCALAR:
appendStringInfoString(context->buf, " SCALAR");
break;
case JS_TYPE_ARRAY:
appendStringInfoString(context->buf, " ARRAY");
break;
case JS_TYPE_OBJECT:
appendStringInfoString(context->buf, " OBJECT");
break;
default:
break;
}
if (pred->unique_keys)
appendStringInfoString(context->buf, " WITH UNIQUE KEYS");
if (!PRETTY_PAREN(context))
appendStringInfoChar(context->buf, ')');
}
break;
case T_JsonExpr:
{
JsonExpr *jexpr = (JsonExpr *) node;
switch (jexpr->op)
{
case JSON_QUERY_OP:
appendStringInfoString(buf, "JSON_QUERY(");
break;
case JSON_VALUE_OP:
appendStringInfoString(buf, "JSON_VALUE(");
break;
case JSON_EXISTS_OP:
appendStringInfoString(buf, "JSON_EXISTS(");
break;
default:
elog(ERROR, "unexpected JsonExpr type: %d", jexpr->op);
break;
}
get_rule_expr(jexpr->formatted_expr, context, showimplicit);
appendStringInfoString(buf, ", ");
get_json_path_spec(jexpr->path_spec, context, showimplicit);
if (jexpr->passing_values)
{
ListCell *lc1,
*lc2;
bool needcomma = false;
appendStringInfoString(buf, " PASSING ");
forboth(lc1, jexpr->passing_names,
lc2, jexpr->passing_values)
{
if (needcomma)
appendStringInfoString(buf, ", ");
needcomma = true;
get_rule_expr((Node *) lfirst(lc2), context, showimplicit);
appendStringInfo(buf, " AS %s",
((String *) lfirst_node(String, lc1))->sval);
}
}
if (jexpr->op != JSON_EXISTS_OP ||
jexpr->returning->typid != BOOLOID)
get_json_returning(jexpr->returning, context->buf,
jexpr->op == JSON_QUERY_OP);
get_json_expr_options(jexpr, context,
jexpr->op == JSON_EXISTS_OP ?
JSON_BEHAVIOR_FALSE : JSON_BEHAVIOR_NULL);
appendStringInfoString(buf, ")");
}
break;
case T_List:
{
char *sep;
@@ -9993,7 +9747,6 @@ looks_like_function(Node *node)
case T_MinMaxExpr:
case T_SQLValueFunction:
case T_XmlExpr:
case T_JsonExpr:
/* these are all accepted by func_expr_common_subexpr */
return true;
default:
@@ -10139,103 +9892,17 @@ get_func_expr(FuncExpr *expr, deparse_context *context,
appendStringInfoChar(buf, ')');
}
static void
get_json_constructor_options(JsonConstructorExpr *ctor, StringInfo buf)
{
if (ctor->absent_on_null)
{
if (ctor->type == JSCTOR_JSON_OBJECT ||
ctor->type == JSCTOR_JSON_OBJECTAGG)
appendStringInfoString(buf, " ABSENT ON NULL");
}
else
{
if (ctor->type == JSCTOR_JSON_ARRAY ||
ctor->type == JSCTOR_JSON_ARRAYAGG)
appendStringInfoString(buf, " NULL ON NULL");
}
if (ctor->unique)
appendStringInfoString(buf, " WITH UNIQUE KEYS");
if (!((ctor->type == JSCTOR_JSON_PARSE ||
ctor->type == JSCTOR_JSON_SCALAR) &&
ctor->returning->typid == JSONOID))
get_json_returning(ctor->returning, buf, true);
}
static void
get_json_constructor(JsonConstructorExpr *ctor, deparse_context *context,
bool showimplicit)
{
StringInfo buf = context->buf;
const char *funcname;
int nargs;
ListCell *lc;
switch (ctor->type)
{
case JSCTOR_JSON_PARSE:
funcname = "JSON";
break;
case JSCTOR_JSON_SCALAR:
funcname = "JSON_SCALAR";
break;
case JSCTOR_JSON_SERIALIZE:
funcname = "JSON_SERIALIZE";
break;
case JSCTOR_JSON_OBJECT:
funcname = "JSON_OBJECT";
break;
case JSCTOR_JSON_ARRAY:
funcname = "JSON_ARRAY";
break;
case JSCTOR_JSON_OBJECTAGG:
get_json_agg_constructor(ctor, context, "JSON_OBJECTAGG", true);
return;
case JSCTOR_JSON_ARRAYAGG:
get_json_agg_constructor(ctor, context, "JSON_ARRAYAGG", false);
return;
default:
elog(ERROR, "invalid JsonConstructorExprType %d", ctor->type);
}
appendStringInfo(buf, "%s(", funcname);
nargs = 0;
foreach(lc, ctor->args)
{
if (nargs > 0)
{
const char *sep = ctor->type == JSCTOR_JSON_OBJECT &&
(nargs % 2) != 0 ? " : " : ", ";
appendStringInfoString(buf, sep);
}
get_rule_expr((Node *) lfirst(lc), context, true);
nargs++;
}
get_json_constructor_options(ctor, buf);
appendStringInfo(buf, ")");
}
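These are the constructor spellings the deparser above prints back, for instance when dumping a view that used the reverted functions; sample expressions in the removed syntax, results indicative:

SELECT JSON_OBJECT('a' : 1, 'b' : NULL ABSENT ON NULL);
-- {"a" : 1}
SELECT JSON_ARRAY(1, NULL, 2 NULL ON NULL RETURNING jsonb);
-- [1, null, 2]
SELECT JSON_OBJECTAGG(k : v WITH UNIQUE KEYS)
FROM (VALUES ('x', 1), ('y', 2)) AS t(k, v);
-- {"x" : 1, "y" : 2}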
/*
* get_agg_expr_helper - Parse back an Aggref node
* get_agg_expr - Parse back an Aggref node
*/
static void
get_agg_expr_helper(Aggref *aggref, deparse_context *context,
Aggref *original_aggref, const char *funcname,
const char *options, bool is_json_objectagg)
get_agg_expr(Aggref *aggref, deparse_context *context,
Aggref *original_aggref)
{
StringInfo buf = context->buf;
Oid argtypes[FUNC_MAX_ARGS];
int nargs;
bool use_variadic = false;
bool use_variadic;
/*
* For a combining aggregate, we look up and deparse the corresponding
@@ -10265,14 +9932,13 @@ get_agg_expr_helper(Aggref *aggref, deparse_context *context,
/* Extract the argument types as seen by the parser */
nargs = get_aggregate_argtypes(aggref, argtypes);
if (!funcname)
funcname = generate_function_name(aggref->aggfnoid, nargs, NIL,
argtypes, aggref->aggvariadic,
&use_variadic,
context->special_exprkind);
/* Print the aggregate name, schema-qualified if needed */
appendStringInfo(buf, "%s(%s", funcname,
appendStringInfo(buf, "%s(%s",
generate_function_name(aggref->aggfnoid, nargs,
NIL, argtypes,
aggref->aggvariadic,
&use_variadic,
context->special_exprkind),
(aggref->aggdistinct != NIL) ? "DISTINCT " : "");
if (AGGKIND_IS_ORDERED_SET(aggref->aggkind))
@@ -10308,18 +9974,7 @@ get_agg_expr_helper(Aggref *aggref, deparse_context *context,
if (tle->resjunk)
continue;
if (i++ > 0)
{
if (is_json_objectagg)
{
if (i > 2)
break; /* skip ABSENT ON NULL and WITH UNIQUE
* args */
appendStringInfoString(buf, " : ");
}
else
appendStringInfoString(buf, ", ");
}
appendStringInfoString(buf, ", ");
if (use_variadic && i == nargs)
appendStringInfoString(buf, "VARIADIC ");
get_rule_expr(arg, context, true);
@@ -10333,9 +9988,6 @@ get_agg_expr_helper(Aggref *aggref, deparse_context *context,
}
}
if (options)
appendStringInfoString(buf, options);
if (aggref->aggfilter != NULL)
{
appendStringInfoString(buf, ") FILTER (WHERE ");
@@ -10345,16 +9997,6 @@ get_agg_expr_helper(Aggref *aggref, deparse_context *context,
appendStringInfoChar(buf, ')');
}
/*
* get_agg_expr - Parse back an Aggref node
*/
static void
get_agg_expr(Aggref *aggref, deparse_context *context, Aggref *original_aggref)
{
get_agg_expr_helper(aggref, context, original_aggref, NULL, NULL,
false);
}
/*
* This is a helper function for get_agg_expr(). It's used when we deparse
* a combining Aggref; resolve_special_varno locates the corresponding partial
@@ -10374,12 +10016,10 @@ get_agg_combine_expr(Node *node, deparse_context *context, void *callback_arg)
}
/*
* get_windowfunc_expr_helper - Parse back a WindowFunc node
* get_windowfunc_expr - Parse back a WindowFunc node
*/
static void
get_windowfunc_expr_helper(WindowFunc *wfunc, deparse_context *context,
const char *funcname, const char *options,
bool is_json_objectagg)
get_windowfunc_expr(WindowFunc *wfunc, deparse_context *context)
{
StringInfo buf = context->buf;
Oid argtypes[FUNC_MAX_ARGS];
@@ -10403,30 +10043,16 @@ get_windowfunc_expr_helper(WindowFunc *wfunc, deparse_context *context,
nargs++;
}
if (!funcname)
funcname = generate_function_name(wfunc->winfnoid, nargs, argnames,
argtypes, false, NULL,
context->special_exprkind);
appendStringInfo(buf, "%s(", funcname);
appendStringInfo(buf, "%s(",
generate_function_name(wfunc->winfnoid, nargs,
argnames, argtypes,
false, NULL,
context->special_exprkind));
/* winstar can be set only in zero-argument aggregates */
if (wfunc->winstar)
appendStringInfoChar(buf, '*');
else
{
if (is_json_objectagg)
{
get_rule_expr((Node *) linitial(wfunc->args), context, false);
appendStringInfoString(buf, " : ");
get_rule_expr((Node *) lsecond(wfunc->args), context, false);
}
else
get_rule_expr((Node *) wfunc->args, context, true);
}
if (options)
appendStringInfoString(buf, options);
get_rule_expr((Node *) wfunc->args, context, true);
if (wfunc->aggfilter != NULL)
{
@@ -10463,15 +10089,6 @@ get_windowfunc_expr_helper(WindowFunc *wfunc, deparse_context *context,
}
}
/*
* get_windowfunc_expr - Parse back a WindowFunc node
*/
static void
get_windowfunc_expr(WindowFunc *wfunc, deparse_context *context)
{
get_windowfunc_expr_helper(wfunc, context, NULL, NULL, false);
}
/*
* get_func_sql_syntax - Parse back a SQL-syntax function call
*
@@ -10712,31 +10329,6 @@ get_func_sql_syntax(FuncExpr *expr, deparse_context *context)
return false;
}
/*
* get_json_agg_constructor - Parse back an aggregate JsonConstructorExpr node
*/
static void
get_json_agg_constructor(JsonConstructorExpr *ctor, deparse_context *context,
const char *funcname, bool is_json_objectagg)
{
StringInfoData options;
initStringInfo(&options);
get_json_constructor_options(ctor, &options);
if (IsA(ctor->func, Aggref))
get_agg_expr_helper((Aggref *) ctor->func, context,
(Aggref *) ctor->func,
funcname, options.data, is_json_objectagg);
else if (IsA(ctor->func, WindowFunc))
get_windowfunc_expr_helper((WindowFunc *) ctor->func, context,
funcname, options.data,
is_json_objectagg);
else
elog(ERROR, "invalid JsonConstructorExpr underlying node type: %d",
nodeTag(ctor->func));
}
/* ----------
* get_coercion_expr
*
@@ -11106,14 +10698,16 @@ get_sublink_expr(SubLink *sublink, deparse_context *context)
/* ----------
* get_xmltable - Parse back a XMLTABLE function
* get_tablefunc - Parse back a table function
* ----------
*/
static void
get_xmltable(TableFunc *tf, deparse_context *context, bool showimplicit)
get_tablefunc(TableFunc *tf, deparse_context *context, bool showimplicit)
{
StringInfo buf = context->buf;
/* XMLTABLE is the only existing implementation. */
appendStringInfoString(buf, "XMLTABLE(");
if (tf->ns_uris != NIL)
@@ -11204,271 +10798,6 @@ get_xmltable(TableFunc *tf, deparse_context *context, bool showimplicit)
appendStringInfoChar(buf, ')');
}
/*
* get_json_table_nested_columns - Parse back nested JSON_TABLE columns
*/
static void
get_json_table_nested_columns(TableFunc *tf, Node *node,
deparse_context *context, bool showimplicit,
bool needcomma)
{
if (IsA(node, JsonTableSibling))
{
JsonTableSibling *n = (JsonTableSibling *) node;
get_json_table_nested_columns(tf, n->larg, context, showimplicit,
needcomma);
get_json_table_nested_columns(tf, n->rarg, context, showimplicit, true);
}
else
{
JsonTableParent *n = castNode(JsonTableParent, node);
if (needcomma)
appendStringInfoChar(context->buf, ',');
appendStringInfoChar(context->buf, ' ');
appendContextKeyword(context, "NESTED PATH ", 0, 0, 0);
get_const_expr(n->path, context, -1);
appendStringInfo(context->buf, " AS %s", quote_identifier(n->name));
get_json_table_columns(tf, n, context, showimplicit);
}
}
/*
* get_json_table_plan - Parse back a JSON_TABLE plan
*/
static void
get_json_table_plan(TableFunc *tf, Node *node, deparse_context *context,
bool parenthesize)
{
if (parenthesize)
appendStringInfoChar(context->buf, '(');
if (IsA(node, JsonTableSibling))
{
JsonTableSibling *n = (JsonTableSibling *) node;
get_json_table_plan(tf, n->larg, context,
IsA(n->larg, JsonTableSibling) ||
castNode(JsonTableParent, n->larg)->child);
appendStringInfoString(context->buf, n->cross ? " CROSS " : " UNION ");
get_json_table_plan(tf, n->rarg, context,
IsA(n->rarg, JsonTableSibling) ||
castNode(JsonTableParent, n->rarg)->child);
}
else
{
JsonTableParent *n = castNode(JsonTableParent, node);
appendStringInfoString(context->buf, quote_identifier(n->name));
if (n->child)
{
appendStringInfoString(context->buf,
n->outerJoin ? " OUTER " : " INNER ");
get_json_table_plan(tf, n->child, context,
IsA(n->child, JsonTableSibling));
}
}
if (parenthesize)
appendStringInfoChar(context->buf, ')');
}
/*
* get_json_table_columns - Parse back JSON_TABLE columns
*/
static void
get_json_table_columns(TableFunc *tf, JsonTableParent *node,
deparse_context *context, bool showimplicit)
{
StringInfo buf = context->buf;
JsonExpr *jexpr = castNode(JsonExpr, tf->docexpr);
ListCell *lc_colname;
ListCell *lc_coltype;
ListCell *lc_coltypmod;
ListCell *lc_colvarexpr;
int colnum = 0;
appendStringInfoChar(buf, ' ');
appendContextKeyword(context, "COLUMNS (", 0, 0, 0);
if (PRETTY_INDENT(context))
context->indentLevel += PRETTYINDENT_VAR;
forfour(lc_colname, tf->colnames,
lc_coltype, tf->coltypes,
lc_coltypmod, tf->coltypmods,
lc_colvarexpr, tf->colvalexprs)
{
char *colname = strVal(lfirst(lc_colname));
JsonExpr *colexpr;
Oid typid;
int32 typmod;
bool ordinality;
JsonBehaviorType default_behavior;
typid = lfirst_oid(lc_coltype);
typmod = lfirst_int(lc_coltypmod);
colexpr = castNode(JsonExpr, lfirst(lc_colvarexpr));
if (colnum < node->colMin)
{
colnum++;
continue;
}
if (colnum > node->colMax)
break;
if (colnum > node->colMin)
appendStringInfoString(buf, ", ");
colnum++;
ordinality = !colexpr;
appendContextKeyword(context, "", 0, 0, 0);
appendStringInfo(buf, "%s %s", quote_identifier(colname),
ordinality ? "FOR ORDINALITY" :
format_type_with_typemod(typid, typmod));
if (ordinality)
continue;
if (colexpr->op == JSON_EXISTS_OP)
{
appendStringInfoString(buf, " EXISTS");
default_behavior = JSON_BEHAVIOR_FALSE;
}
else
{
if (colexpr->op == JSON_QUERY_OP)
{
char typcategory;
bool typispreferred;
get_type_category_preferred(typid, &typcategory, &typispreferred);
if (typcategory == TYPCATEGORY_STRING)
appendStringInfoString(buf,
colexpr->format->format_type == JS_FORMAT_JSONB ?
" FORMAT JSONB" : " FORMAT JSON");
}
default_behavior = JSON_BEHAVIOR_NULL;
}
if (jexpr->on_error->btype == JSON_BEHAVIOR_ERROR)
default_behavior = JSON_BEHAVIOR_ERROR;
appendStringInfoString(buf, " PATH ");
get_json_path_spec(colexpr->path_spec, context, showimplicit);
get_json_expr_options(colexpr, context, default_behavior);
}
if (node->child)
get_json_table_nested_columns(tf, node->child, context, showimplicit,
node->colMax >= node->colMin);
if (PRETTY_INDENT(context))
context->indentLevel -= PRETTYINDENT_VAR;
appendContextKeyword(context, ")", 0, 0, 0);
}
/* ----------
* get_json_table - Parse back a JSON_TABLE function
* ----------
*/
static void
get_json_table(TableFunc *tf, deparse_context *context, bool showimplicit)
{
StringInfo buf = context->buf;
JsonExpr *jexpr = castNode(JsonExpr, tf->docexpr);
JsonTableParent *root = castNode(JsonTableParent, tf->plan);
appendStringInfoString(buf, "JSON_TABLE(");
if (PRETTY_INDENT(context))
context->indentLevel += PRETTYINDENT_VAR;
appendContextKeyword(context, "", 0, 0, 0);
get_rule_expr(jexpr->formatted_expr, context, showimplicit);
appendStringInfoString(buf, ", ");
get_const_expr(root->path, context, -1);
appendStringInfo(buf, " AS %s", quote_identifier(root->name));
if (jexpr->passing_values)
{
ListCell *lc1,
*lc2;
bool needcomma = false;
appendStringInfoChar(buf, ' ');
appendContextKeyword(context, "PASSING ", 0, 0, 0);
if (PRETTY_INDENT(context))
context->indentLevel += PRETTYINDENT_VAR;
forboth(lc1, jexpr->passing_names,
lc2, jexpr->passing_values)
{
if (needcomma)
appendStringInfoString(buf, ", ");
needcomma = true;
appendContextKeyword(context, "", 0, 0, 0);
get_rule_expr((Node *) lfirst(lc2), context, false);
appendStringInfo(buf, " AS %s",
quote_identifier((lfirst_node(String, lc1))->sval)
);
}
if (PRETTY_INDENT(context))
context->indentLevel -= PRETTYINDENT_VAR;
}
get_json_table_columns(tf, root, context, showimplicit);
appendStringInfoChar(buf, ' ');
appendContextKeyword(context, "PLAN ", 0, 0, 0);
get_json_table_plan(tf, (Node *) root, context, true);
if (jexpr->on_error->btype != JSON_BEHAVIOR_EMPTY)
get_json_behavior(jexpr->on_error, context, "ERROR");
if (PRETTY_INDENT(context))
context->indentLevel -= PRETTYINDENT_VAR;
appendContextKeyword(context, ")", 0, 0, 0);
}
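For completeness, the PLAN and NESTED PATH output produced by get_json_table() corresponds to queries along these lines; the clause placement follows the reverted grammar as best as can be reconstructed here, so treat it as a sketch:

SELECT jt.*
FROM JSON_TABLE('[{"a": 1, "b": [10, 20]}]', '$[*]' AS root
                COLUMNS (a int PATH '$.a',
                         NESTED PATH '$.b[*]' AS nb
                           COLUMNS (b int PATH '$'))
                PLAN (root OUTER nb)) AS jt;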
/* ----------
* get_tablefunc - Parse back a table function
* ----------
*/
static void
get_tablefunc(TableFunc *tf, deparse_context *context, bool showimplicit)
{
/* XMLTABLE and JSON_TABLE are the only existing implementations. */
if (tf->functype == TFT_XMLTABLE)
get_xmltable(tf, context, showimplicit);
else if (tf->functype == TFT_JSON_TABLE)
get_json_table(tf, context, showimplicit);
}
/* ----------
* get_from_clause - Parse back a FROM clause
*

View File

@@ -737,76 +737,6 @@ JumbleExpr(JumbleState *jstate, Node *node)
JumbleExpr(jstate, (Node *) conf->exclRelTlist);
}
break;
case T_JsonFormat:
{
JsonFormat *format = (JsonFormat *) node;
APP_JUMB(format->format_type);
APP_JUMB(format->encoding);
}
break;
case T_JsonReturning:
{
JsonReturning *returning = (JsonReturning *) node;
JumbleExpr(jstate, (Node *) returning->format);
APP_JUMB(returning->typid);
APP_JUMB(returning->typmod);
}
break;
case T_JsonValueExpr:
{
JsonValueExpr *expr = (JsonValueExpr *) node;
JumbleExpr(jstate, (Node *) expr->raw_expr);
JumbleExpr(jstate, (Node *) expr->formatted_expr);
JumbleExpr(jstate, (Node *) expr->format);
}
break;
case T_JsonConstructorExpr:
{
JsonConstructorExpr *ctor = (JsonConstructorExpr *) node;
APP_JUMB(ctor->type);
JumbleExpr(jstate, (Node *) ctor->args);
JumbleExpr(jstate, (Node *) ctor->func);
JumbleExpr(jstate, (Node *) ctor->coercion);
JumbleExpr(jstate, (Node *) ctor->returning);
APP_JUMB(ctor->absent_on_null);
APP_JUMB(ctor->unique);
}
break;
case T_JsonIsPredicate:
{
JsonIsPredicate *pred = (JsonIsPredicate *) node;
JumbleExpr(jstate, (Node *) pred->expr);
JumbleExpr(jstate, (Node *) pred->format);
APP_JUMB(pred->item_type);
APP_JUMB(pred->unique_keys);
}
break;
case T_JsonExpr:
{
JsonExpr *jexpr = (JsonExpr *) node;
APP_JUMB(jexpr->op);
JumbleExpr(jstate, jexpr->formatted_expr);
JumbleExpr(jstate, jexpr->path_spec);
foreach(temp, jexpr->passing_names)
{
APP_JUMB_STRING(lfirst_node(String, temp)->sval);
}
JumbleExpr(jstate, (Node *) jexpr->passing_values);
if (jexpr->on_empty)
{
APP_JUMB(jexpr->on_empty->btype);
JumbleExpr(jstate, jexpr->on_empty->default_expr);
}
APP_JUMB(jexpr->on_error->btype);
JumbleExpr(jstate, jexpr->on_error->default_expr);
}
break;
case T_List:
foreach(temp, (List *) node)
{
@@ -879,11 +809,9 @@ JumbleExpr(JumbleState *jstate, Node *node)
{
TableFunc *tablefunc = (TableFunc *) node;
APP_JUMB(tablefunc->functype);
JumbleExpr(jstate, tablefunc->docexpr);
JumbleExpr(jstate, tablefunc->rowexpr);
JumbleExpr(jstate, (Node *) tablefunc->colexprs);
JumbleExpr(jstate, (Node *) tablefunc->colvalexprs);
}
break;
case T_TableSampleClause: