summaryrefslogtreecommitdiff
path: root/src/backend/utils/adt
diff options
context:
space:
mode:
Diffstat (limited to 'src/backend/utils/adt')
-rw-r--r--src/backend/utils/adt/json.c105
-rw-r--r--src/backend/utils/adt/jsonfuncs.c20
-rw-r--r--src/backend/utils/adt/ruleutils.c35
3 files changed, 148 insertions(+), 12 deletions(-)
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index d088fafc56..5edcb8bb60 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -13,6 +13,7 @@
*/
#include "postgres.h"
+#include "access/hash.h"
#include "catalog/pg_proc.h"
#include "catalog/pg_type.h"
#include "common/hashfn.h"
@@ -1655,6 +1656,94 @@ escape_json(StringInfo buf, const char *str)
appendStringInfoCharMacro(buf, '"');
}
+/* Semantic actions for key uniqueness check */
+static void
+json_unique_object_start(void *_state)
+{
+ JsonUniqueParsingState *state = _state;
+ JsonUniqueStackEntry *entry;
+
+ if (!state->unique)
+ return;
+
+ /* push object entry to stack */
+ entry = palloc(sizeof(*entry));
+ entry->object_id = state->id_counter++;
+ entry->parent = state->stack;
+ state->stack = entry;
+}
+
+static void
+json_unique_object_end(void *_state)
+{
+ JsonUniqueParsingState *state = _state;
+ JsonUniqueStackEntry *entry;
+
+ if (!state->unique)
+ return;
+
+ entry = state->stack;
+ state->stack = entry->parent; /* pop object from stack */
+ pfree(entry);
+}
+
+static void
+json_unique_object_field_start(void *_state, char *field, bool isnull)
+{
+ JsonUniqueParsingState *state = _state;
+ JsonUniqueStackEntry *entry;
+
+ if (!state->unique)
+ return;
+
+ /* find key collision in the current object */
+ if (json_unique_check_key(&state->check, field, state->stack->object_id))
+ return;
+
+ state->unique = false;
+
+ /* pop all object entries */
+ while ((entry = state->stack))
+ {
+ state->stack = entry->parent;
+ pfree(entry);
+ }
+}
+
+/* Validate JSON text and additionally check key uniqueness */
+bool
+json_validate(text *json, bool check_unique_keys)
+{
+ JsonLexContext *lex = makeJsonLexContext(json, check_unique_keys);
+ JsonSemAction uniqueSemAction = {0};
+ JsonUniqueParsingState state;
+ JsonParseErrorType result;
+
+ if (check_unique_keys)
+ {
+ state.lex = lex;
+ state.stack = NULL;
+ state.id_counter = 0;
+ state.unique = true;
+ json_unique_check_init(&state.check);
+
+ uniqueSemAction.semstate = &state;
+ uniqueSemAction.object_start = json_unique_object_start;
+ uniqueSemAction.object_field_start = json_unique_object_field_start;
+ uniqueSemAction.object_end = json_unique_object_end;
+ }
+
+ result = pg_parse_json(lex, check_unique_keys ? &uniqueSemAction : &nullSemAction);
+
+ if (result != JSON_SUCCESS)
+ return false; /* invalid json */
+
+ if (check_unique_keys && !state.unique)
+ return false; /* keys are not unique */
+
+ return true; /* ok */
+}
+
/*
* SQL function json_typeof(json) -> text
*
@@ -1670,21 +1759,13 @@ escape_json(StringInfo buf, const char *str)
Datum
json_typeof(PG_FUNCTION_ARGS)
{
- text *json;
-
- JsonLexContext *lex;
- JsonTokenType tok;
+ text *json = PG_GETARG_TEXT_PP(0);
char *type;
- JsonParseErrorType result;
-
- json = PG_GETARG_TEXT_PP(0);
- lex = makeJsonLexContext(json, false);
+ JsonTokenType tok;
/* Lex exactly one token from the input and check its type. */
- result = json_lex(lex);
- if (result != JSON_SUCCESS)
- json_ereport_error(result, lex);
- tok = lex->token_type;
+ tok = json_get_first_token(json, true);
+
switch (tok)
{
case JSON_TOKEN_OBJECT_START:
diff --git a/src/backend/utils/adt/jsonfuncs.c b/src/backend/utils/adt/jsonfuncs.c
index 29664aa6e4..a24d498b06 100644
--- a/src/backend/utils/adt/jsonfuncs.c
+++ b/src/backend/utils/adt/jsonfuncs.c
@@ -5528,3 +5528,23 @@ transform_string_values_scalar(void *state, char *token, JsonTokenType tokentype
else
appendStringInfoString(_state->strval, token);
}
+
+JsonTokenType
+json_get_first_token(text *json, bool throw_error)
+{
+ JsonLexContext *lex;
+ JsonParseErrorType result;
+
+ lex = makeJsonLexContext(json, false);
+
+ /* Lex exactly one token from the input and check its type. */
+ result = json_lex(lex);
+
+ if (result == JSON_SUCCESS)
+ return lex->token_type;
+
+ if (throw_error)
+ json_ereport_error(result, lex);
+
+ return JSON_TOKEN_INVALID; /* invalid json */
+}
diff --git a/src/backend/utils/adt/ruleutils.c b/src/backend/utils/adt/ruleutils.c
index 38da269519..0ed774f6e6 100644
--- a/src/backend/utils/adt/ruleutils.c
+++ b/src/backend/utils/adt/ruleutils.c
@@ -8230,6 +8230,7 @@ isSimpleNode(Node *node, Node *parentNode, int prettyFlags)
case T_NullTest:
case T_BooleanTest:
case T_DistinctExpr:
+ case T_JsonIsPredicate:
switch (nodeTag(parentNode))
{
case T_FuncExpr:
@@ -9635,6 +9636,40 @@ get_rule_expr(Node *node, deparse_context *context,
get_json_constructor((JsonConstructorExpr *) node, context, false);
break;
+ case T_JsonIsPredicate:
+ {
+ JsonIsPredicate *pred = (JsonIsPredicate *) node;
+
+ if (!PRETTY_PAREN(context))
+ appendStringInfoChar(context->buf, '(');
+
+ get_rule_expr_paren(pred->expr, context, true, node);
+
+ appendStringInfoString(context->buf, " IS JSON");
+
+ switch (pred->value_type)
+ {
+ case JS_TYPE_SCALAR:
+ appendStringInfoString(context->buf, " SCALAR");
+ break;
+ case JS_TYPE_ARRAY:
+ appendStringInfoString(context->buf, " ARRAY");
+ break;
+ case JS_TYPE_OBJECT:
+ appendStringInfoString(context->buf, " OBJECT");
+ break;
+ default:
+ break;
+ }
+
+ if (pred->unique_keys)
+ appendStringInfoString(context->buf, " WITH UNIQUE KEYS");
+
+ if (!PRETTY_PAREN(context))
+ appendStringInfoChar(context->buf, ')');
+ }
+ break;
+
case T_List:
{
char *sep;