
MDEV-32854: Make JSON_DEPTH_LIMIT unlimited

This patch is the ColumnStore part of the task. ColumnStore wants to keep the
previous depth limit of 32, so this patch preserves that behaviour for compatibility.
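
As a rough, non-authoritative sketch of what the retained 32-level limit means for the JSON functions exercised by these tests (the @deep user variable and the expected results are illustrative assumptions, not taken from this commit):

-- Minimal illustration, not part of this commit: JSON_DEPTH counts
-- nesting levels, so a scalar is depth 1 and each enclosing array or
-- object adds one.
SELECT JSON_DEPTH('[[[1]]]');                                     -- 4
-- Build a document nested 41 levels deep: 40 arrays around a scalar.
SET @deep = CONCAT(REPEAT('[', 40), '1', REPEAT(']', 40));
-- A document comfortably inside the limit is valid JSON.
SELECT JSON_VALID(CONCAT(REPEAT('[', 10), '1', REPEAT(']', 10))); -- 1
-- Under the 32-level limit that ColumnStore keeps, @deep is assumed
-- to be rejected (not reported as valid); the server-side behaviour
-- after MDEV-32854 depends on the new, higher depth limit.
SELECT JSON_VALID(@deep);

The hunks below update the recorded test results so that each statement appears on a single line; the expected result values themselves are unchanged.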
Author: Rucha Deodhar
Date: 2025-05-22 15:42:12 +05:30
Committed by: Leonid Fedorov
Parent: 9a2ebebaf9
Commit: 9fe37d5919
50 changed files with 1047 additions and 952 deletions


@@ -8,23 +8,20 @@ USE json_valid_db;
# String literal - valid JSON
#
CREATE TABLE t1(l LONGTEXT) ENGINE = COLUMNSTORE;
-INSERT INTO
-t1
-VALUES
-('123'),
-('-123'),
-('5000000000'),
-('-5000000000'),
-('1.23'),
-('"123"'),
-('true'),
-('false'),
-('null'),
-('{"address": "Trondheim"}'),
-(JSON_OBJECT()),
-(JSON_OBJECT(1, 2)),
-(JSON_ARRAY()),
-(JSON_ARRAY(1, 2));
+INSERT INTO t1 VALUES ('123');
+INSERT INTO t1 VALUES ('-123');
+INSERT INTO t1 VALUES ('5000000000');
+INSERT INTO t1 VALUES ('-5000000000');
+INSERT INTO t1 VALUES ('1.23');
+INSERT INTO t1 VALUES ('"123"');
+INSERT INTO t1 VALUES ('true');
+INSERT INTO t1 VALUES ('false');
+INSERT INTO t1 VALUES ('null');
+INSERT INTO t1 VALUES ('{"address": "Trondheim"}');
+INSERT INTO t1 VALUES (JSON_OBJECT());
+INSERT INTO t1 VALUES (JSON_OBJECT(1, 2));
+INSERT INTO t1 VALUES (JSON_ARRAY());
+INSERT INTO t1 VALUES (JSON_ARRAY(1, 2));
SELECT
l AS raw,
JSON_VALID(l) AS is_valid,
@@ -50,14 +47,11 @@ null 1 1
# String literal - invalid JSON
#
TRUNCATE t1;
-INSERT INTO
-t1
-VALUES
-('12 3'),
-('{key:value}'),
-('{key:value'),
-('[1,2,]'),
-('[1,2');
+INSERT INTO t1 VALUES ('12 3');
+INSERT INTO t1 VALUES ('{key:value}');
+INSERT INTO t1 VALUES ('{key:value');
+INSERT INTO t1 VALUES ('[1,2,]');
+INSERT INTO t1 VALUES ('[1,2');
SELECT
l AS raw,
JSON_VALID(l) AS is_valid,
@@ -74,12 +68,8 @@ raw is_valid compact
# String literal - not in UTF-8
#
TRUNCATE t1;
-SET
-NAMES 'ascii';
-INSERT INTO
-t1
-VALUES
-('123');
+SET NAMES 'ascii';
+INSERT INTO t1 VALUES ('123');
SELECT
l AS raw,
JSON_VALID(l) AS is_valid,
@@ -88,16 +78,12 @@ FROM
t1;
raw is_valid compact
123 1 1
-SET
-NAMES 'utf8';
+SET NAMES 'utf8';
#
# Bare NULL
#
TRUNCATE t1;
-INSERT INTO
-t1
-VALUES
-(NULL);
+INSERT INTO t1 VALUES (NULL);
SELECT
JSON_VALID(l)
FROM
@@ -108,10 +94,7 @@ NULL
# Function result - string
#
TRUNCATE t1;
-INSERT INTO
-t1
-VALUES
-(UPPER('"abc"'));
+INSERT INTO t1 VALUES (UPPER('"abc"'));
SELECT
JSON_VALID(l)
FROM
@@ -122,28 +105,20 @@ JSON_VALID(l)
# Function result - string not in UTF-8
#
TRUNCATE t1;
-SET
-NAMES 'latin1';
-INSERT INTO
-t1
-VALUES
-(UPPER('"abc"'));
+SET NAMES 'latin1';
+INSERT INTO t1 VALUES (UPPER('"abc"'));
SELECT
JSON_VALID(l)
FROM
t1;
JSON_VALID(l)
1
-SET
-NAMES 'utf8';
+SET NAMES 'utf8';
#
# Function result - date, not valid as JSON without CAST
#
TRUNCATE t1;
-INSERT INTO
-t1
-VALUES
-(CAST('2015-01-15' AS DATE));
+INSERT INTO t1 VALUES (CAST('2015-01-15' AS DATE));
SELECT
JSON_VALID(l)
FROM
@@ -154,13 +129,10 @@ JSON_VALID(l)
# The date string doesn't parse as JSON text, so wrong:
#
TRUNCATE t1;
-INSERT INTO
-t1
-VALUES
-(
+INSERT INTO t1 VALUES (
CAST(
CAST('2015-01-15' AS DATE) AS CHAR CHARACTER SET 'utf8'
)
)
);
SELECT
JSON_VALID(l)
@@ -172,14 +144,8 @@ JSON_VALID(l)
# Function result - NULL
#
TRUNCATE t1;
-INSERT INTO
-t1
-VALUES
-(UPPER(NULL));
-INSERT INTO
-t1
-VALUES
-(UPPER(CAST(NULL AS CHAR)));
+INSERT INTO t1 VALUES (UPPER(NULL));
+INSERT INTO t1 VALUES (UPPER(CAST(NULL AS CHAR)));
SELECT
JSON_VALID(l)
FROM