
MCOL-785 Implement DISTRIBUTED JSON functions

The following functions are created:
Create function JSON_VALID and test cases
Create function JSON_DEPTH and test cases
Create function JSON_LENGTH and test cases
Create function JSON_EQUALS and test cases
Create function JSON_NORMALIZE and test cases
Create function JSON_TYPE and test cases
Create function JSON_OBJECT and test cases
Create function JSON_ARRAY and test cases
Create function JSON_KEYS and test cases
Create function JSON_EXISTS and test cases
Create function JSON_QUOTE/JSON_UNQUOTE and test cases
Create function JSON_COMPACT/DETAILED/LOOSE and test cases
Create function JSON_MERGE and test cases
Create function JSON_MERGE_PATCH and test cases
Create function JSON_VALUE and test cases
Create function JSON_QUERY and test cases
Create function JSON_CONTAINS and test cases
Create function JSON_ARRAY_APPEND and test cases
Create function JSON_ARRAY_INSERT and test cases
Create function JSON_INSERT/REPLACE/SET and test cases
Create function JSON_REMOVE and test cases
Create function JSON_CONTAINS_PATH and test cases
Create function JSON_OVERLAPS and test cases
Create function JSON_EXTRACT and test cases
Create function JSON_SEARCH and test cases

Note:
Some functions' output differs from MDB because session variables affect their output, e.g. JSON_QUOTE/JSON_UNQUOTE.
This depends on MCOL-5212
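
A minimal usage sketch of the kind of statement these distributed implementations now evaluate inside ColumnStore (the table name and sample row below are illustrative only, not part of the commit):

CREATE TABLE jt (doc TEXT) ENGINE = ColumnStore;
INSERT INTO jt VALUES ('{"a": [1, 2, 3]}');
SELECT JSON_VALID(doc), JSON_EXTRACT(doc, '$.a[1]'), JSON_LENGTH(doc, '$.a') FROM jt;
-- expected (per stock MariaDB behaviour): 1, 2, 3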
Ziy1-Tan
2022-06-23 10:49:29 +08:00
parent b5d8e0324b
commit cdd41f05f3
82 changed files with 8645 additions and 2 deletions

View File

@@ -29,6 +29,7 @@ using namespace std;
#include <boost/algorithm/string.hpp>
using namespace boost;
#include "functor_json.h"
#include "bytestream.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
@@ -323,6 +324,46 @@ void FunctionColumn::unserialize(messageqcpp::ByteStream& b)
Func_decode* decode = dynamic_cast<Func_decode*>(fFunctor);
if (decode)
fFunctor = fDynamicFunctor = new Func_decode();
// Special treatment for JSON functions that carry a variable path: re-create the functor so the cached path state is reset
if (dynamic_cast<Func_json_length*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_length();
if (dynamic_cast<Func_json_keys*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_keys();
if (dynamic_cast<Func_json_exists*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_exists();
if (dynamic_cast<Func_json_value*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_value();
if (dynamic_cast<Func_json_query*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_query();
if (dynamic_cast<Func_json_contains*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_contains();
if (dynamic_cast<Func_json_array_append*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_array_append();
if (dynamic_cast<Func_json_array_insert*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_array_insert();
if (auto f = dynamic_cast<Func_json_insert*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_insert(f->getMode());
if (dynamic_cast<Func_json_remove*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_remove();
if (dynamic_cast<Func_json_contains_path*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_contains_path();
if (dynamic_cast<Func_json_search*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_search();
if (dynamic_cast<Func_json_extract*>(fFunctor))
fFunctor = fDynamicFunctor = new Func_json_extract();
}
bool FunctionColumn::operator==(const FunctionColumn& t) const

View File

@@ -3550,6 +3550,35 @@ ReturnedColumn* buildReturnedColumn(Item* item, gp_walk_info& gwi, bool& nonSupp
return rc;
}
// Build a constant column that renders a boolean item as the string "true" or "false"
ReturnedColumn* buildBooleanConstantColumn(Item* item, gp_walk_info& gwi, bool& nonSupport)
{
ConstantColumn* cc = NULL;
if (gwi.thd)
{
{
if (!item->fixed())
{
item->fix_fields(gwi.thd, (Item**)&item);
}
}
}
int64_t val = static_cast<int64_t>(item->val_int());
cc = new ConstantColumnSInt(colType_MysqlToIDB(item), val ? "true" : "false", val);
if (cc)
cc->timeZone(gwi.timeZone);
if (cc && item->name.length)
cc->alias(item->name.str);
if (cc)
cc->charsetNumber(item->collation.collation->number);
return cc;
}
ArithmeticColumn* buildArithmeticColumn(Item_func* item, gp_walk_info& gwi, bool& nonSupport)
{
if (get_fe_conn_info_ptr() == NULL)
@@ -4013,7 +4042,24 @@ ReturnedColumn* buildFunctionColumn(Item_func* ifp, gp_walk_info& gwi, bool& non
return NULL;
}
-ReturnedColumn* rc = buildReturnedColumn(ifp->arguments()[i], gwi, nonSupport);
+ReturnedColumn* rc = NULL;
// Special treatment for JSON functions:
// boolean constant arguments are built as the boolean strings "true"/"false",
// e.g. `SELECT JSON_ARRAY(true, false)` should return [true, false] instead of [1, 0]
bool mayHasBoolArg = ((funcName == "json_insert" || funcName == "json_replace" ||
funcName == "json_set" || funcName == "json_array_append" ||
funcName == "json_array_insert") && i != 0 && i % 2 == 0) ||
(funcName == "json_array") ||
(funcName == "json_object" && i % 2 == 1);
bool isBoolType =
(ifp->arguments()[i]->const_item() && ifp->arguments()[i]->type_handler()->is_bool_type());
if (mayHasBoolArg && isBoolType)
rc = buildBooleanConstantColumn(ifp->arguments()[i], gwi, nonSupport);
else
rc = buildReturnedColumn(ifp->arguments()[i], gwi, nonSupport);
// MCOL-1510 It must be a temp table field, so find the corresponding column.
if (!rc && ifp->arguments()[i]->type() == Item::REF_ITEM)
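
To make the argument-position rule above concrete, a hedged illustration (plain MariaDB behaviour that the distributed path is expected to match; indices refer to the 0-based loop over ifp->arguments(), and these queries are illustrative only, not part of the diff):

SELECT JSON_ARRAY(true, false);         -- any argument may be boolean   -> [true, false]
SELECT JSON_OBJECT('flag', true);       -- values sit at odd indices     -> {"flag": true}
SELECT JSON_INSERT('{}', '$.a', true);  -- values sit at even indices >0 -> {"a": true}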

View File

@@ -0,0 +1,28 @@
DROP DATABASE IF EXISTS json_array_db;
CREATE DATABASE json_array_db;
USE json_array_db;
# ----------------------------------------------------------------------
# Test of JSON_ARRAY function.
# ----------------------------------------------------------------------
CREATE TABLE t1(l text) ENGINE = COLUMNSTORE;
INSERT INTO t1 VALUES('1');
SELECT JSON_ARRAY() FROM t1;
JSON_ARRAY()
[]
SELECT JSON_ARRAY(l) FROM t1;
JSON_ARRAY(l)
["1"]
SELECT JSON_ARRAY(l, null, 'My name is "Foo"', 3.1415, 6) FROM t1;
JSON_ARRAY(l, null, 'My name is "Foo"', 3.1415, 6)
["1", null, "My name is \"Foo\"", 3.1415, 6]
SELECT JSON_ARRAY(l, true, false, "true", "false") FROM t1;
JSON_ARRAY(l, true, false, "true", "false")
["1", true, false, "true", "false"]
SELECT JSON_ARRAY(l, '{"key1":"value1"}') FROM t1;
JSON_ARRAY(l, '{"key1":"value1"}')
["1", "{\"key1\":\"value1\"}"]
SELECT JSON_ARRAY(l, JSON_COMPACT( '{"key1":"value1"}' ) ) FROM t1;
JSON_ARRAY(l, JSON_COMPACT( '{"key1":"value1"}' ) )
["1", "{\"key1\":\"value1\"}"]
DROP TABLE t1;
DROP DATABASE json_array_db;

View File

@@ -0,0 +1,94 @@
DROP DATABASE IF EXISTS json_array_append_db;
CREATE DATABASE json_array_append_db;
USE json_array_append_db;
# ----------------------------------------------------------------------
# Test of JSON_ARRAY_APPEND function.
# ----------------------------------------------------------------------
CREATE TABLE t1(a TEXT, p TEXT, v TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
('[1,2,3]', '$[0]', 2),
('[1,2,3]', '$[0]', 1.2),
('[1,2,3]', '$[0]', 'key1'),
('[1,2,3]', '$[0]', TRUE),
('[1,2,3]', '$[0]', false),
('[1,2,3]', '$[0]', NULL),
(
'{"a": "foo", "b": "bar", "c": "wibble" }',
'$.b',
4
),
(
'{"a": "foo", "b": "bar", "c": "wibble" }',
'$.c',
'grape'
),
(
'{"a": "foo", "b": [1,2,3], "c": ["apple","pear"]}',
'$.b',
4
),
(
'{"a": "foo", "b": [1,2,3], "c": ["apple","pear"]}',
'$.c',
'grape'
);
SELECT
a AS array,
p AS path,
v AS value,
JSON_ARRAY_APPEND(a, p, v) AS result
FROM
t1;
array path value result
[1,2,3] $[0] 2 [[1, "2"], 2, 3]
[1,2,3] $[0] 1.2 [[1, "1.2"], 2, 3]
[1,2,3] $[0] key1 [[1, "key1"], 2, 3]
[1,2,3] $[0] 1 [[1, "1"], 2, 3]
[1,2,3] $[0] 0 [[1, "0"], 2, 3]
[1,2,3] $[0] NULL [[1, null], 2, 3]
{"a": "foo", "b": "bar", "c": "wibble" } $.b 4 {"a": "foo", "b": ["bar", "4"], "c": "wibble"}
{"a": "foo", "b": "bar", "c": "wibble" } $.c grape {"a": "foo", "b": "bar", "c": ["wibble", "grape"]}
{"a": "foo", "b": [1,2,3], "c": ["apple","pear"]} $.b 4 {"a": "foo", "b": [1, 2, 3, "4"], "c": ["apple", "pear"]}
{"a": "foo", "b": [1,2,3], "c": ["apple","pear"]} $.c grape {"a": "foo", "b": [1, 2, 3], "c": ["apple", "pear", "grape"]}
# NULL args
CREATE TABLE t2(a TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t2
VALUES
('[1,2,3]');
SELECT
JSON_ARRAY_APPEND(a, NULL, JSON_COMPACT(1)),
JSON_ARRAY_APPEND(a, '$', NULL)
FROM
t2;
JSON_ARRAY_APPEND(a, NULL, JSON_COMPACT(1)) JSON_ARRAY_APPEND(a, '$', NULL)
NULL [1, 2, 3, null]
TRUNCATE t2;
INSERT INTO
t2
VALUES
('$.b');
SELECT
JSON_ARRAY_APPEND(NULL, a, JSON_COMPACT(1)),
JSON_ARRAY_APPEND('[1,2,3]', a, NULL)
FROM
t2;
JSON_ARRAY_APPEND(NULL, a, JSON_COMPACT(1)) JSON_ARRAY_APPEND('[1,2,3]', a, NULL)
NULL NULL
TRUNCATE t2;
INSERT INTO
t2
VALUES
('$**[0]');
error ER_INVALID_JSON_PATH_WILDCARD
SELECT
JSON_ARRAY_APPEND(JSON_COMPACT('{"a": {"b": [3]}}'), a, 6)
FROM
t2;
JSON_ARRAY_APPEND(JSON_COMPACT('{"a": {"b": [3]}}'), a, 6)
NULL
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_array_append_db;

View File

@@ -0,0 +1,94 @@
DROP DATABASE IF EXISTS json_array_insert_db;
CREATE DATABASE json_array_insert_db;
USE json_array_insert_db;
# ----------------------------------------------------------------------
# Test of JSON_ARRAY_INSERT function.
# ----------------------------------------------------------------------
CREATE TABLE t1(a TEXT, p TEXT, v TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
('[1,2,3]', '$[0]', 2),
('[1,2,3]', '$[0]', 1.2),
('[1,2,3]', '$[0]', 'key1'),
('[1,2,3]', '$[0]', TRUE),
('[1,2,3]', '$[0]', false),
('[1,2,3]', '$[0]', NULL),
(
'{"a": "foo", "b": "bar", "c": "wibble" }',
'$.b',
4
),
(
'{"a": "foo", "b": "bar", "c": "wibble" }',
'$.c',
'grape'
),
(
'{"a": "foo", "b": [1,2,3], "c": ["apple","pear"]}',
'$.b',
4
),
(
'{"a": "foo", "b": [1,2,3], "c": ["apple","pear"]}',
'$.c',
'grape'
);
SELECT
a AS array,
p AS path,
v AS value,
JSON_ARRAY_INSERT(a, p, v) AS result
FROM
t1;
array path value result
[1,2,3] $[0] 2 ["2", 1, 2, 3]
[1,2,3] $[0] 1.2 ["1.2", 1, 2, 3]
[1,2,3] $[0] key1 ["key1", 1, 2, 3]
[1,2,3] $[0] 1 ["1", 1, 2, 3]
[1,2,3] $[0] 0 ["0", 1, 2, 3]
[1,2,3] $[0] NULL [null, 1, 2, 3]
{"a": "foo", "b": "bar", "c": "wibble" } $.b 4 NULL
{"a": "foo", "b": "bar", "c": "wibble" } $.c grape NULL
{"a": "foo", "b": [1,2,3], "c": ["apple","pear"]} $.b 4 NULL
{"a": "foo", "b": [1,2,3], "c": ["apple","pear"]} $.c grape NULL
# NULL args
CREATE TABLE t2(a TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t2
VALUES
('[1,2,3]');
SELECT
JSON_ARRAY_INSERT(a, NULL, JSON_COMPACT(1)),
JSON_ARRAY_INSERT(a, '$', NULL)
FROM
t2;
JSON_ARRAY_INSERT(a, NULL, JSON_COMPACT(1)) JSON_ARRAY_INSERT(a, '$', NULL)
NULL NULL
TRUNCATE t2;
INSERT INTO
t2
VALUES
('$.b');
SELECT
JSON_ARRAY_INSERT(NULL, a, JSON_COMPACT(1)),
JSON_ARRAY_INSERT('[1,2,3]', a, NULL)
FROM
t2;
JSON_ARRAY_INSERT(NULL, a, JSON_COMPACT(1)) JSON_ARRAY_INSERT('[1,2,3]', a, NULL)
NULL NULL
TRUNCATE t2;
INSERT INTO
t2
VALUES
('$**[0]');
error ER_INVALID_JSON_PATH_WILDCARD
SELECT
JSON_ARRAY_INSERT(JSON_COMPACT('{"a": {"b": [3]}}'), a, 6)
FROM
t2;
JSON_ARRAY_INSERT(JSON_COMPACT('{"a": {"b": [3]}}'), a, 6)
NULL
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_array_insert_db;

View File

@@ -0,0 +1,81 @@
DROP DATABASE IF EXISTS json_contains_db;
CREATE DATABASE json_contains_db;
USE json_contains_db;
# ----------------------------------------------------------------------
# Test of JSON_CONTAINS function.
# ----------------------------------------------------------------------
CREATE TABLE t1(j LONGTEXT, v LONGTEXT, p LONGTEXT) ENGINE = columnstore;
INSERT INTO
t1
VALUES
('{"k1":123, "k2":345}', '123', '$.k1'),
('', '', '$'),
('null', 'null', '$'),
('"10"', '"10"', '$'),
('"10"', '10', '$'),
('10.1', '10', '$'),
('10.0', '10', '$');
SELECT
j AS json,
v AS value,
p AS path,
JSON_CONTAINS(j, v, p) AS result
FROM
t1;
json value path result
{"k1":123, "k2":345} 123 $.k1 1
NULL NULL $ NULL
null null $ 1
"10" "10" $ 1
"10" 10 $ 0
10.1 10 $ 0
10.0 10 $ 1
CREATE TABLE t2(j LONGTEXT, v LONGTEXT) ENGINE = columnstore;
INSERT INTO
t2
VALUES
('"you"', '"you"'),
('"youth"', '"you"'),
('[1]', '1'),
('[2, 1]', '1'),
('[2, [2, 3], 1]', '1'),
('[4, [2, 3], 1]', '2'),
('[2, 1]', '[1, 2]'),
('[2, 1]', '[1, 0, 2]'),
('[2, 0, 3, 1]', '[1, 2]'),
('{"b":[1,2], "a":1}', '{"a":1, "b":2}'),
('{"a":1}', '{}'),
('[1, {"a":1}]', '{}'),
('[1, {"a":1}]', '{"a":1}'),
('[{"abc":"def", "def":"abc"}]', '["foo","bar"]'),
(
'[{"abc":"def", "def":"abc"}, "bar"]',
'["bar", {}]'
),
('[{"a":"b"},{"c":"d"}]', '{"c":"d"}');
SELECT
j AS json,
v AS value,
JSON_CONTAINS(j, v) AS result
FROM
t2;
json value result
"you" "you" 1
"youth" "you" 0
[1] 1 1
[2, 1] 1 1
[2, [2, 3], 1] 1 1
[4, [2, 3], 1] 2 1
[2, 1] [1, 2] 1
[2, 1] [1, 0, 2] 0
[2, 0, 3, 1] [1, 2] 1
{"b":[1,2], "a":1} {"a":1, "b":2} 1
{"a":1} {} 1
[1, {"a":1}] {} 1
[1, {"a":1}] {"a":1} 1
[{"abc":"def", "def":"abc"}] ["foo","bar"] 0
[{"abc":"def", "def":"abc"}, "bar"] ["bar", {}] 1
[{"a":"b"},{"c":"d"}] {"c":"d"} 1
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_contains_db;

View File

@@ -0,0 +1,69 @@
DROP DATABASE IF EXISTS json_contains_path_db;
CREATE DATABASE json_contains_path_db;
USE json_contains_path_db;
# ----------------------------------------------------------------------
# Test of JSON_CONTAINS_PATH function.
# ----------------------------------------------------------------------
CREATE TABLE t1(j TEXT, r TEXT, p TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
('{"key1":1, "key2":[2,3]}', "oNE", "$.key2[1]"),
('{"key1":1, "key2":[2,3]}', "oNE", "$.key2[10]"),
('{"key1":1, "key2":[2,3]}', "oNE", "$.ma"),
('{"key1":1, "key2":[2,3]}', "one", "$.key1"),
('{ "a": true }', NULL, '$.a'),
('{ "a": true }', 'all', NULL),
('{"a":{"b":"c"}}', 'one', '$.a.*');
SELECT
j AS json,
r AS return_flag,
p AS path,
JSON_CONTAINS_PATH(j, r, p) AS result
FROM
t1;
json return_flag path result
{"key1":1, "key2":[2,3]} oNE $.key2[1] 1
{"key1":1, "key2":[2,3]} oNE $.key2[10] 0
{"key1":1, "key2":[2,3]} oNE $.ma 0
{"key1":1, "key2":[2,3]} one $.key1 1
{ "a": true } NULL $.a NULL
{ "a": true } all NULL NULL
{"a":{"b":"c"}} one $.a.* 1
CREATE TABLE t2(j TEXT, r TEXT, p1 TEXT, p2 TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t2
VALUES
(
'{"key1":1, "key2":[2,3]}',
"one",
"$.key1",
"$.ma"
),
(
'{"key1":1, "key2":[2,3]}',
"aLl",
"$.key1",
"$.ma"
),
(
'{"key1":1, "key2":[2,3]}',
"aLl",
"$.key1",
"$.key2"
);
SELECT
j AS json,
r AS return_flag,
p1 AS path,
p2 AS path,
JSON_CONTAINS_PATH(j, r, p1, p2) AS result
FROM
t2;
json return_flag path path result
{"key1":1, "key2":[2,3]} one $.key1 $.ma 1
{"key1":1, "key2":[2,3]} aLl $.key1 $.ma 0
{"key1":1, "key2":[2,3]} aLl $.key1 $.key2 1
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_contains_path_db;

View File

@@ -0,0 +1,57 @@
DROP DATABASE IF EXISTS json_depth_db;
CREATE DATABASE json_depth_db;
USE json_depth_db;
# ----------------------------------------------------------------------
# Test of JSON_DEPTH function.
# ----------------------------------------------------------------------
CREATE TABLE t1(l LONGTEXT) ENGINE = columnstore;
# Return NULL
INSERT INTO t1 VALUES(NULL);
SELECT JSON_DEPTH(l) FROM t1;
JSON_DEPTH(l)
NULL
# Return 1
TRUNCATE t1;
INSERT INTO t1 VALUES('{}');
INSERT INTO t1 VALUES('[]');
INSERT INTO t1 VALUES('"abc"');
SELECT JSON_DEPTH(l) FROM t1;
JSON_DEPTH(l)
1
1
1
# Error ER_INVALID_TYPE_FOR_JSON
CREATE TABLE t2(i int) ENGINE=columnstore;
INSERT INTO t2 VALUES(1);
SELECT JSON_DEPTH(i) FROM t2;
JSON_DEPTH(i)
1
DROP TABLE t2;
# Error ER_INVALID_JSON_TEXT_IN_PARAM
TRUNCATE t1;
INSERT INTO t1 VALUES('abc');
INSERT INTO t1 VALUES('[ "a", true, "b" , { "e" : false }, "c" , null');
SELECT JSON_DEPTH(l) FROM t1;
JSON_DEPTH(l)
NULL
NULL
# Return 2
TRUNCATE t1;
INSERT INTO t1 VALUES('{ "a" : true, "b" : false, "c" : null }');
INSERT INTO t1 VALUES('[ "a", true, "b" , false, "c" , null ]');
INSERT INTO t1 VALUES('{ "a" : true, "b" : {}, "c" : null }');
INSERT INTO t1 VALUES('[ "a", true, "b" , {}, "c" , null ]');
SELECT JSON_DEPTH(l) FROM t1;
JSON_DEPTH(l)
2
2
2
2
# Return 3
TRUNCATE t1;
INSERT INTO t1 VALUES('{ "a" : true, "b" : { "e" : false }, "c" : null }');
SELECT JSON_DEPTH(l) FROM t1;
JSON_DEPTH(l)
3
DROP TABLE t1;
DROP DATABASE json_depth_db;

View File

@@ -0,0 +1,44 @@
DROP DATABASE IF EXISTS json_equals_db;
CREATE DATABASE json_equals_db;
USE json_equals_db;
# ----------------------------------------------------------------------
# Test of JSON_EQUALS function.
# ----------------------------------------------------------------------
# Return 1
CREATE TABLE t1(l LONGTEXT, r LONGTEXT) ENGINE = columnstore;
INSERT INTO t1 VALUES('{"a":1,"b":2}','{"a":1,"b":2}');
INSERT INTO t1 VALUES('{"a":1,"b":2}','{"b":2,"a":1}');
INSERT INTO t1 VALUES('{"a":1,"b":2}','{"a": 1,"b": 2}');
INSERT INTO t1 VALUES('{"a": 1,"b":2}','{"b":2,"a":1}');
INSERT INTO t1 VALUES('[1,2]','[1,2]');
INSERT INTO t1 VALUES('[1,2]','[1 , 2]');
INSERT INTO t1 VALUES(1,1);
SELECT JSON_EQUALS(l, r) FROM t1;
JSON_EQUALS(l, r)
1
1
1
1
1
1
1
# Return 0
TRUNCATE t1;
INSERT INTO t1 VALUES('{"a":1,"b":3}','{"a":1,"b":2}');
INSERT INTO t1 VALUES('[1,2]','[2,1]');
INSERT INTO t1 VALUES(1,2);
SELECT JSON_EQUALS(l, r) FROM t1;
JSON_EQUALS(l, r)
0
0
0
# NULL
TRUNCATE t1;
INSERT INTO t1 VALUES('["a",true,{"e":false},null','["a",true,{"e":false},null');
INSERT INTO t1 VALUES('s1',"s1");
SELECT JSON_EQUALS(l, r) FROM t1;
JSON_EQUALS(l, r)
NULL
NULL
DROP TABLE t1;
DROP DATABASE json_equals_db;

View File

@@ -0,0 +1,32 @@
DROP DATABASE IF EXISTS json_exists_db;
CREATE DATABASE json_exists_db;
USE json_exists_db;
# ----------------------------------------------------------------------
# Test of JSON_EXISTS function.
# ----------------------------------------------------------------------
# Test case 0
CREATE TABLE t1(j TEXT, p TEXT) ENGINE = columnstore;
SET
@json = '{"key1":"xxxx", "key2":[1, 2, 3]}';
INSERT INTO
t1
VALUES
(@json, '$.key1'),
(@json, '$.key1[0]'),
(@json, '$.key2'),
(@json, '$.key2[1]'),
(@json, '$.key2[10]');
SELECT
j,
p,
JSON_EXISTS(j, p) AS result
FROM
t1;
j p result
{"key1":"xxxx", "key2":[1, 2, 3]} $.key1 1
{"key1":"xxxx", "key2":[1, 2, 3]} $.key1[0] 1
{"key1":"xxxx", "key2":[1, 2, 3]} $.key2 1
{"key1":"xxxx", "key2":[1, 2, 3]} $.key2[1] 1
{"key1":"xxxx", "key2":[1, 2, 3]} $.key2[10] 0
DROP TABLE t1;
DROP DATABASE json_exists_db;

View File

@@ -0,0 +1,88 @@
DROP DATABASE IF EXISTS json_extract_db;
CREATE DATABASE json_extract_db;
USE json_extract_db;
# ----------------------------------------------------------------------
# Test of JSON_EXTRACT function.
# ----------------------------------------------------------------------
# Single path expression
CREATE TABLE t1(j LONGTEXT, p LONGTEXT) ENGINE = COLUMNSTORE;
SET
@json = '[1, "val2", [3.1, -4]]';
INSERT INTO
t1
VALUES
(@json, '$[0]'),
(@json, '$[1]'),
(@json, '$[2]'),
(@json, '$[3]'),
(@json, '$[2][0]'),
(@json, '$[2][1]'),
(@json, '$[2][10]'),
(@json, '$'),
('1', '$'),
('[10, 20, [30, 40], 1, 10]', '$[1]'),
('{"key1":"asd", "key2":[2,3]}', "$.key1"),
('{"key0":true, "key1":"qwe"}', "$.key1"),
('[10, 20, [30, 40]]', '$[2][*]'),
('[10, 20, [{"a":3}, 30, 40]]', '$[2][*]'),
(json_object('foo', 'foobar'), '$');
SELECT
j,
p,
JSON_EXTRACT(j, p) AS result
FROM
t1;
j p result
[1, "val2", [3.1, -4]] $[0] 1
[1, "val2", [3.1, -4]] $[1] "val2"
[1, "val2", [3.1, -4]] $[2] [3.1, -4]
[1, "val2", [3.1, -4]] $[3] NULL
[1, "val2", [3.1, -4]] $[2][0] 3.1
[1, "val2", [3.1, -4]] $[2][1] -4
[1, "val2", [3.1, -4]] $[2][10] NULL
[1, "val2", [3.1, -4]] $ [1, "val2", [3.1, -4]]
1 $ 1
[10, 20, [30, 40], 1, 10] $[1] 20
{"key1":"asd", "key2":[2,3]} $.key1 "asd"
{"key0":true, "key1":"qwe"} $.key1 "qwe"
[10, 20, [30, 40]] $[2][*] [30, 40]
[10, 20, [{"a":3}, 30, 40]] $[2][*] [{"a": 3}, 30, 40]
{"foo": "foobar"} $ {"foo": "foobar"}
# Multiple path expression
CREATE TABLE t2(j LONGTEXT, p1 LONGTEXT, p2 LONGTEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t2
VALUES
(
'{"key1":"asd", "key2":[2,3]}',
"$.keyX",
"$.keyY"
),
(
'{"key1":"asd", "key2":[2,3]}',
"$.key1",
"$.key2"
),
('{"key1":5, "key2":[2,3]}', "$.key1", "$.key2"),
('[10, 20, [30, 40], 1, 10]', '$[1]', '$[25]'),
('[{"a": [3, 4]}, {"b": 2}]', '$[0].a', '$[1].a');
SELECT
j,
p1,
p2,
JSON_EXTRACT(j, p1, p2) AS result
FROM
t2;
j p1 p2 result
{"key1":"asd", "key2":[2,3]} $.keyX $.keyY NULL
{"key1":"asd", "key2":[2,3]} $.key1 $.key2 ["asd", [2, 3]]
{"key1":5, "key2":[2,3]} $.key1 $.key2 [5, [2, 3]]
[10, 20, [30, 40], 1, 10] $[1] $[25] [20]
[{"a": [3, 4]}, {"b": 2}] $[0].a $[1].a [[3, 4]]
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_extract_db;

View File

@@ -0,0 +1,56 @@
DROP DATABASE IF EXISTS json_insert_db;
CREATE DATABASE json_insert_db;
USE json_insert_db;
# ----------------------------------------------------------------------
# Test of JSON_INSERT|REPLACE|SET function.
# ----------------------------------------------------------------------
CREATE TABLE t1(j TEXT, p TEXT, v TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
(
'{"a":1, "b":{"c":1}, "d":[1, 2]}',
'$.b.k1',
'word'
),
('{"a":1, "b":{"c":1}, "d":[1, 2]}', '$.d[3]', 3),
('{"a":1, "b":{"c":1}, "d":[1, 2]}', '$.a[2]', 2),
(
'{"a":1, "b":{"c":1}, "d":[1, 2]}',
'$.b.c',
'word'
),
('1', '$[0]', 4),
('[]', '$[0][0]', 100),
('1', '$[0][0]', 100),
(
'{ "a": 1, "b": [2, 3]}',
'$.a',
10
),
(
'{ "a": 1, "b": [2, 3]}',
'$.b',
'[true, false]'
);
SELECT
j AS json,
p AS path,
v AS value,
JSON_INSERT(j, p, v) AS json_insert,
JSON_REPLACE(j, p, v) AS json_replace,
JSON_SET(j, p, v) AS json_set
FROM
t1;
json path value json_insert json_replace json_set
{"a":1, "b":{"c":1}, "d":[1, 2]} $.b.k1 word {"a": 1, "b": {"c": 1, "k1": "word"}, "d": [1, 2]} {"a": 1, "b": {"c": 1}, "d": [1, 2]} {"a": 1, "b": {"c": 1, "k1": "word"}, "d": [1, 2]}
{"a":1, "b":{"c":1}, "d":[1, 2]} $.d[3] 3 {"a": 1, "b": {"c": 1}, "d": [1, 2, "3"]} {"a": 1, "b": {"c": 1}, "d": [1, 2]} {"a": 1, "b": {"c": 1}, "d": [1, 2, "3"]}
{"a":1, "b":{"c":1}, "d":[1, 2]} $.a[2] 2 {"a": [1, "2"], "b": {"c": 1}, "d": [1, 2]} {"a": 1, "b": {"c": 1}, "d": [1, 2]} {"a": [1, "2"], "b": {"c": 1}, "d": [1, 2]}
{"a":1, "b":{"c":1}, "d":[1, 2]} $.b.c word {"a": 1, "b": {"c": 1}, "d": [1, 2]} {"a": 1, "b": {"c": "word"}, "d": [1, 2]} {"a": 1, "b": {"c": "word"}, "d": [1, 2]}
1 $[0] 4 1 NULL NULL
[] $[0][0] 100 [] [] []
1 $[0][0] 100 1 NULL NULL
{ "a": 1, "b": [2, 3]} $.a 10 {"a": 1, "b": [2, 3]} {"a": "10", "b": [2, 3]} {"a": "10", "b": [2, 3]}
{ "a": 1, "b": [2, 3]} $.b [true, false] {"a": 1, "b": [2, 3]} {"a": 1, "b": "[true, false]"} {"a": 1, "b": "[true, false]"}
DROP TABLE t1;
DROP DATABASE json_insert_db;

View File

@@ -0,0 +1,32 @@
DROP DATABASE IF EXISTS json_keys_db;
CREATE DATABASE json_keys_db;
USE json_keys_db;
# ----------------------------------------------------------------------
# Test of JSON_KEYS function.
# ----------------------------------------------------------------------
CREATE TABLE t1(l TEXT) ENGINE = columnstore;
INSERT INTO t1 VALUES('{"key1":"value1", "key2":{"key3": "value2"}}');
INSERT INTO t1 VALUES('{"key1":"value1", "key2":{"key3": "value3", "key4":"value4"}}');
INSERT INTO t1 VALUES('{"key1":"value1" "key2":{"key3": "value3", "key4":"value4"}}');
SELECT JSON_KEYS(l) from t1;
JSON_KEYS(l)
["key1", "key2"]
["key1", "key2"]
NULL
SELECT JSON_KEYS(l, '$.key2') from t1;
JSON_KEYS(l, '$.key2')
["key3"]
["key3", "key4"]
NULL
SELECT JSON_KEYS(l, '$.key1') from t1;
JSON_KEYS(l, '$.key1')
NULL
NULL
NULL
SELECT JSON_KEYS(l, '$.key123') from t1;
JSON_KEYS(l, '$.key123')
NULL
NULL
NULL
DROP TABLE t1;
DROP DATABASE json_keys_db;

View File

@@ -0,0 +1,123 @@
DROP DATABASE IF EXISTS json_length_db;
CREATE DATABASE json_length_db;
USE json_length_db;
# ----------------------------------------------------------------------
# Test of JSON_LENGTH function.
# ----------------------------------------------------------------------
CREATE TABLE t1(l LONGTEXT) ENGINE = columnstore;
# Invalid json text
INSERT INTO t1 VALUES(null);
INSERT INTO t1 VALUES('1');
INSERT INTO t1 VALUES('abc');
INSERT INTO t1 VALUES('"abc"');
INSERT INTO t1 VALUES('true');
INSERT INTO t1 VALUES('false');
INSERT INTO t1 VALUES('null');
error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_LENGTH(l) FROM t1;
JSON_LENGTH(l)
NULL
1
NULL
1
1
1
1
# Valid json text
TRUNCATE t1;
INSERT INTO t1 VALUES('{}');
INSERT INTO t1 VALUES('{ "a" : 100, "b" : 200 }');
INSERT INTO t1 VALUES('{ "a" : 100, "b" : [ 300, 400, 500 ] }');
INSERT INTO t1 VALUES('[]');
INSERT INTO t1 VALUES('[ null, "foo", true, 1.1 ]');
INSERT INTO t1 VALUES('[ null, "foo", true, { "a" : "b", "c" : "d" } ]');
INSERT INTO t1 VALUES('"foo"');
INSERT INTO t1 VALUES('1.2');
SELECT JSON_LENGTH(l) FROM t1;
JSON_LENGTH(l)
0
2
2
0
4
4
1
1
# Bad path expressions
TRUNCATE t1;
INSERT INTO t1 VALUES('true');
error ER_INVALID_JSON_PATH
SELECT JSON_LENGTH(l, 'c$') FROM t1;
JSON_LENGTH(l, 'c$')
NULL
TRUNCATE t1;
INSERT INTO t1 VALUES('{ "foo" : [ true, false ] }');
error ER_INVALID_JSON_PATH
SELECT JSON_LENGTH(l, '$.foo[bar]') FROM t1;
JSON_LENGTH(l, '$.foo[bar]')
NULL
# wildcards not allowed in path expressions for this function
TRUNCATE t1;
INSERT INTO t1 VALUES('true');
error ER_INVALID_JSON_PATH_WILDCARD
SELECT JSON_LENGTH(l, '$.*') FROM t1;
JSON_LENGTH(l, '$.*')
NULL
TRUNCATE t1;
INSERT INTO t1 VALUES('true');
error ER_INVALID_JSON_PATH_WILDCARD
SELECT JSON_LENGTH(l, '$.foo**.bar') FROM t1;
JSON_LENGTH(l, '$.foo**.bar')
NULL
# Error ER_INVALID_JSON_TEXT_IN_PARAM
TRUNCATE t1;
INSERT INTO t1 VALUES('abc');
INSERT INTO t1 VALUES('[ "a", true, "b" , { "e" : false }, "c" , null');
SELECT JSON_LENGTH(l) FROM t1;
JSON_LENGTH(l)
NULL
NULL
# Path exist
TRUNCATE t1;
INSERT INTO t1 VALUES('[ 1, [ 2, 3, 4 ], 5 ]');
SELECT JSON_LENGTH(l, '$[0]') FROM t1;
JSON_LENGTH(l, '$[0]')
1
SELECT JSON_LENGTH(l, '$[1]') FROM t1;
JSON_LENGTH(l, '$[1]')
3
SELECT JSON_LENGTH(l, '$[2]') FROM t1;
JSON_LENGTH(l, '$[2]')
1
SELECT JSON_LENGTH(l, '$[2][0]') FROM t1;
JSON_LENGTH(l, '$[2][0]')
1
TRUNCATE t1;
INSERT INTO t1 VALUES('[ 1, [ 2, 3, 4 ], {"a": 1} ]');
SELECT JSON_LENGTH(l, '$[2][0]') FROM t1;
JSON_LENGTH(l, '$[2][0]')
1
TRUNCATE t1;
INSERT INTO t1 VALUES('[ 1, { "a": true, "b": false, "c": null }, 5 ]');
SELECT JSON_LENGTH(l, '$[1]') FROM t1;
JSON_LENGTH(l, '$[1]')
3
TRUNCATE t1;
INSERT INTO t1 VALUES('{ "a" : 123, "b" : [ 123, 456, 789 ] }');
SELECT JSON_LENGTH(l) FROM t1;
JSON_LENGTH(l)
2
SELECT JSON_LENGTH(l, '$.b') FROM t1;
JSON_LENGTH(l, '$.b')
3
SELECT JSON_LENGTH(l, '$.c') FROM t1;
JSON_LENGTH(l, '$.c')
NULL
# Non-existent path
TRUNCATE t1;
INSERT INTO t1 VALUES('[ 1, [ 2, 3, 4 ], 5 ]');
SELECT JSON_LENGTH(l, '$[2][1]') FROM t1;
JSON_LENGTH(l, '$[2][1]')
NULL
DROP TABLE t1;
DROP DATABASE json_length_db;

View File

@@ -0,0 +1,105 @@
DROP DATABASE IF EXISTS json_merge_db;
CREATE DATABASE json_merge_db;
USE json_merge_db;
# ----------------------------------------------------------------------
# Test of JSON_MERGE function.
# ----------------------------------------------------------------------
CREATE TABLE t1(l TEXT, r TEXT) ENGINE = columnstore;
INSERT INTO t1 VALUES('[1, 2, 3]','[4, 5, 6]');
# Not enough args
SELECT JSON_MERGE() FROM t1;
ERROR 42000: Incorrect parameter count in the call to native function 'JSON_MERGE'
SELECT JSON_MERGE(l) FROM t1;
ERROR 42000: Incorrect parameter count in the call to native function 'JSON_MERGE'
SELECT JSON_MERGE(NULL) FROM t1;
ERROR 42000: Incorrect parameter count in the call to native function 'JSON_MERGE'
# Null args
SELECT JSON_MERGE(NULL, r) FROM t1;
JSON_MERGE(NULL, r)
NULL
SELECT JSON_MERGE(l, NULL) FROM t1;
JSON_MERGE(l, NULL)
NULL
SELECT JSON_MERGE(NULL, l, r) FROM t1;
JSON_MERGE(NULL, l, r)
NULL
SELECT JSON_MERGE(l, NULL, r) FROM t1;
JSON_MERGE(l, NULL, r)
NULL
SELECT JSON_MERGE(l, r, NULL) FROM t1;
JSON_MERGE(l, r, NULL)
NULL
# Invalid JSON text
error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_MERGE(l, '[4, 5, 6') FROM t1;
JSON_MERGE(l, '[4, 5, 6')
NULL
error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_MERGE('[1, 2, 3', r) FROM t1;
JSON_MERGE('[1, 2, 3', r)
NULL
# Good JSON_MERGE() Expressions
TRUNCATE t1;
INSERT INTO t1 VALUES('1', '2' );
INSERT INTO t1 VALUES('1', '[2, 3]' );
INSERT INTO t1 VALUES('[1, 2]', '3' );
INSERT INTO t1 VALUES('1', '{ "a": 2 }' );
INSERT INTO t1 VALUES('{ "a": 2 }', '1' );
INSERT INTO t1 VALUES('[1, 2]', '[3, 4]' );
INSERT INTO t1 VALUES('{ "a": 2 }', '{ "b": 3}' );
INSERT INTO t1 VALUES('[1, 2]', '{ "a": 2 }' );
INSERT INTO t1 VALUES('{ "a": 2 }', '[1, 2]' );
INSERT INTO t1 VALUES('{"a": 1, "b": 2 }', '{"b": 3, "d": 4 }' );
INSERT INTO t1 VALUES('{"a": 1, "b": 2 }', '{"b": [3, 4], "d": 4 }' );
INSERT INTO t1 VALUES('{"a": 1, "b": [2, 3] }', '{"b": 4, "d": 4 }' );
INSERT INTO t1 VALUES('{"a": 1, "b": 2 }', '{"b": {"e": 7, "f": 8}, "d": 4 }' );
INSERT INTO t1 VALUES('{"b": {"e": 7, "f": 8}, "d": 4 }', '{"a": 1, "b": 2 }' );
INSERT INTO t1 VALUES('{"a": 1, "b": [2, 9] }', '{"b": [10, 11], "d": 4 }' );
INSERT INTO t1 VALUES('{"a": 1, "b": [2, 9] }', '{"b": {"e": 7, "f": 8}, "d": 4 }' );
INSERT INTO t1 VALUES('{"b": {"e": 7, "f": 8}, "d": 4 }', '{"a": 1, "b": [2, 9] }' );
INSERT INTO t1 VALUES('{"b": {"e": 7, "f": 8}, "d": 4 }', '{ "a": 1, "b": {"e": 20, "g": 21 } }' );
INSERT INTO t1 VALUES('{ "a" : "foo", "b" : [ true, { "c" : 123 } ] }', '[ 5, 6]');
INSERT INTO t1 VALUES('{ "a" : "foo", "b" : [ true, { "c" : 123 } ] }', '{ "b": [ false, 34 ] }');
INSERT INTO t1 VALUES('{ "a" : "foo", "b" : [ true, { "c" : 123 } ] }','{ "b": "bar" }');
INSERT INTO t1 VALUES('{ "a" : { "b" : 1 } }','{ "a" : { "c" : 1 } }');
SELECT JSON_MERGE(l, r) FROM t1;
JSON_MERGE(l, r)
[1, 2]
[1, 2, 3]
[1, 2, 3]
[1, {"a": 2}]
[{"a": 2}, 1]
[1, 2, 3, 4]
{"a": 2, "b": 3}
[1, 2, {"a": 2}]
[{"a": 2}, 1, 2]
{"a": 1, "b": [2, 3], "d": 4}
{"a": 1, "b": [2, 3, 4], "d": 4}
{"a": 1, "b": [2, 3, 4], "d": 4}
{"a": 1, "b": [2, {"e": 7, "f": 8}], "d": 4}
{"b": [{"e": 7, "f": 8}, 2], "d": 4, "a": 1}
{"a": 1, "b": [2, 9, 10, 11], "d": 4}
{"a": 1, "b": [2, 9, {"e": 7, "f": 8}], "d": 4}
{"b": [{"e": 7, "f": 8}, 2, 9], "d": 4, "a": 1}
{"b": {"e": [7, 20], "f": 8, "g": 21}, "d": 4, "a": 1}
[{"a": "foo", "b": [true, {"c": 123}]}, 5, 6]
{"a": "foo", "b": [true, {"c": 123}, false, 34]}
{"a": "foo", "b": [true, {"c": 123}, "bar"]}
{"a": {"b": 1, "c": 1}}
CREATE TABLE t2(l1 TEXT, l2 TEXT, l3 TEXT) ENGINE = columnstore;
INSERT INTO t2 VALUES('1', '2', '3' );
INSERT INTO t2 VALUES('[1, 2 ]', '3', '[4, 5]' );
INSERT INTO t2 VALUES
(
'{ "a": true, "b": { "c": 3, "d": 4 }, "e": [ 1, 2 ] }',
'{ "d": false, "b": { "g": 3, "d": 5 }, "f": [ 1, 2 ] }',
'{ "m": true, "b": { "h": 8, "d": 4 }, "e": [ 3, 4 ] }'
);
SELECT JSON_MERGE(l1, l2, l3) from t2;
JSON_MERGE(l1, l2, l3)
[1, 2, 3]
[1, 2, 3, 4, 5]
{"a": true, "b": {"c": 3, "d": [4, 5, 4], "g": 3, "h": 8}, "e": [1, 2, 3, 4], "d": false, "f": [1, 2], "m": true}
DROP TABLE t1;
DROP TABLE t2;
DROP DATABASE json_merge_db;

View File

@@ -0,0 +1,77 @@
DROP DATABASE IF EXISTS json_merge_patch_db;
Warnings:
Note 1008 Can't drop database 'json_merge_patch_db'; database doesn't exist
CREATE DATABASE json_merge_patch_db;
USE json_merge_patch_db;
CREATE TABLE t1(l1 TEXT, l2 TEXT) ENGINE = columnstore;
INSERT INTO t1(l1, l2) VALUES
('{"a":"b"}', '{"a":"c"}'),
('{"a":"b"}', '{"b":"c"}'),
('{"a":"b"}', '{"a":null}'),
('{"a":"b", "b":"c"}', '{"a":null}'),
('{"a":["b"]}', '{"a":"c"}'),
('{"a":"c"}', '{"a":["b"]}'),
('{"a": {"b":"c"}}', '{"a": {"b":"d", "c":null}}'),
('{"a":[{"b":"c"}]}', '{"a": [1]}'),
('["a","b"]', '["c","d"]'),
('{"a":"b"}', '["c"]'),
('{"a":"foo"}', 'null'),
('{"a":"foo"}', '"bar"'),
('{"e":null}', '{"a":1}'),
('[1,2]', '{"a":"b", "c":null}'),
('{}', '{"a":{"bb":{"ccc":null}}}'),
(NULL, '{}'),
('{}', NULL);
SELECT l1, l2,
JSON_MERGE_PATCH(l1, l2) AS `l1 + l2`
FROM t1;
l1 l2 l1 + l2
{"a":"b"} {"a":"c"} {"a": "c"}
{"a":"b"} {"b":"c"} {"a": "b", "b": "c"}
{"a":"b"} {"a":null} {}
{"a":"b", "b":"c"} {"a":null} {"b": "c"}
{"a":["b"]} {"a":"c"} {"a": "c"}
{"a":"c"} {"a":["b"]} {"a": ["b"]}
{"a": {"b":"c"}} {"a": {"b":"d", "c":null}} {"a": {"b": "d"}}
{"a":[{"b":"c"}]} {"a": [1]} {"a": [1]}
["a","b"] ["c","d"] ["c", "d"]
{"a":"b"} ["c"] ["c"]
{"a":"foo"} null null
{"a":"foo"} "bar" "bar"
{"e":null} {"a":1} {"e": null, "a": 1}
[1,2] {"a":"b", "c":null} {"a": "b"}
{} {"a":{"bb":{"ccc":null}}} {"a": {"bb": {}}}
NULL {} NULL
{} NULL NULL
DROP TABLE t1;
CREATE TABLE t2(l1 TEXT, l2 TEXT, l3 TEXT) ENGINE = columnstore;
INSERT INTO t2 VALUES
('{"a":"b"}', NULL, '{"c":"d"}'),
(NULL, '[1,2,3]', '[4,5,6]'),
(NULL, 'a', 'b'),
('{"a":"b"}', '[1,2,3]', '{"c":null,"d":"e"}');
SELECT l1, l2, l3,
JSON_MERGE_PATCH(l1, l2, l3) AS merged
FROM t2;
l1 l2 l3 merged
{"a":"b"} NULL {"c":"d"} NULL
NULL [1,2,3] [4,5,6] [4, 5, 6]
NULL a b NULL
{"a":"b"} [1,2,3] {"c":null,"d":"e"} {"d": "e"}
DROP TABLE t2;
CREATE TABLE t3(l1 TEXT, l2 TEXT) ENGINE = columnstore;
SELECT JSON_MERGE_PATCH() FROM t3;
ERROR 42000: Incorrect parameter count in the call to native function 'JSON_MERGE_PATCH'
INSERT INTO t3(l1, l2) VALUES('{}', '{"a":"c"}');
SELECT l1, JSON_MERGE_PATCH(l1) AS merged FROM t3;
ERROR 42000: Incorrect parameter count in the call to native function 'JSON_MERGE_PATCH'
INSERT INTO t3(l1, l2) VALUES
('{', '[1,2,3]'),
('{"a":"b"}', '[1,');
SELECT l1, l2, JSON_MERGE_PATCH(l1, l2) AS merged FROM t3;
l1 l2 merged
{} {"a":"c"} {"a": "c"}
{ [1,2,3] NULL
{"a":"b"} [1, NULL
DROP TABLE t3;
DROP DATABASE json_merge_patch_db;

View File

@@ -0,0 +1,35 @@
DROP DATABASE IF EXISTS json_normalize_db;
CREATE DATABASE json_normalize_db;
USE json_normalize_db;
# ----------------------------------------------------------------------
# Test of JSON_NORMALIZE function.
# ----------------------------------------------------------------------
CREATE TABLE t1(l LONGTEXT) ENGINE = columnstore;
INSERT INTO t1 VALUES('{"key2":"v2","key1":"v1"}');
INSERT INTO t1 VALUES('{"key2": "v2", "key1":"v1"}');
INSERT INTO t1 VALUES('{"key1": "v2", "key1":"v1"}');
INSERT INTO t1 VALUES('{"key1": "v2", "key2":"v1"}');
INSERT INTO t1 VALUES('{}');
INSERT INTO t1 VALUES('{ }');
INSERT INTO t1 VALUES('"123"');
INSERT INTO t1 VALUES('[ 1,2,3]');
INSERT INTO t1 VALUES('[]');
INSERT INTO t1 VALUES('[ ]');
INSERT INTO t1 VALUES(null);
INSERT INTO t1 VALUES('{"key1":value1}');
SELECT JSON_NORMALIZE(l) from t1;
JSON_NORMALIZE(l)
{"key1":"v1","key2":"v2"}
{"key1":"v1","key2":"v2"}
{"key1":"v2","key1":"v1"}
{"key1":"v2","key2":"v1"}
{}
{}
"123"
[1.0E0,2.0E0,3.0E0]
[]
[]
NULL
NULL
DROP TABLE t1;
DROP DATABASE json_normalize_db;

View File

@@ -0,0 +1,72 @@
DROP DATABASE IF EXISTS json_object_db;
CREATE DATABASE json_object_db;
USE json_object_db;
# ----------------------------------------------------------------------
# Test of JSON_OBJECT function.
# ----------------------------------------------------------------------
CREATE TABLE t1(l TEXT) ENGINE = COLUMNSTORE;
INSERT INTO t1 VALUES('a');
SELECT json_object(l) FROM t1;
ERROR 42000: Incorrect parameter count in the call to native function 'json_object'
SELECT json_object(l, 1, 'b') FROM t1;
ERROR 42000: Incorrect parameter count in the call to native function 'json_object'
# Null arg
TRUNCATE t1;
INSERT INTO t1 values(null);
SELECT JSON_OBJECT(l, 1) FROM t1;
JSON_OBJECT(l, 1)
{"": 1}
SELECT JSON_OBJECT(1, l) FROM t1;
JSON_OBJECT(1, l)
{"1": null}
# Valid arg
TRUNCATE t1;
INSERT INTO t1 values('a');
SELECT JSON_OBJECT(l, null) FROM t1;
JSON_OBJECT(l, null)
{"a": null}
SELECT JSON_OBJECT(l, 1) FROM t1;
JSON_OBJECT(l, 1)
{"a": 1}
SELECT JSON_OBJECT(l, 1, 'b', 'foo') FROM t1;
JSON_OBJECT(l, 1, 'b', 'foo')
{"a": 1, "b": "foo"}
SELECT JSON_OBJECT(l, 1, 'b', 'foo','c','{ "d": "e" }') FROM t1;
JSON_OBJECT(l, 1, 'b', 'foo','c','{ "d": "e" }')
{"a": 1, "b": "foo", "c": "{ \"d\": \"e\" }"}
SELECT JSON_OBJECT(l, true, 'b', false, 'c', null ) FROM t1;
JSON_OBJECT(l, true, 'b', false, 'c', null )
{"a": true, "b": false, "c": null}
SELECT JSON_OBJECT(l, 'true', 'b', 'false', 'c', null ) FROM t1;
JSON_OBJECT(l, 'true', 'b', 'false', 'c', null )
{"a": "true", "b": "false", "c": null}
SELECT JSON_VALID(json_object(l, 1 )) from t1;
JSON_VALID(json_object(l, 1 ))
1
# Long key
TRUNCATE t1;
INSERT INTO t1 values('a');
# SELECT JSON_OBJECT(REPEAT(l, 64 * 1024), 1) FROM t1;
# Non-string keys are cast to CHAR
TRUNCATE t1;
INSERT INTO t1 values('a');
SELECT JSON_OBJECT(1, l) FROM t1;
JSON_OBJECT(1, l)
{"1": "a"}
SELECT JSON_OBJECT(CAST(1 AS CHAR), l) FROM t1;
JSON_OBJECT(CAST(1 AS CHAR), l)
{"1": "a"}
SELECT JSON_OBJECT(true, l) FROM t1;
JSON_OBJECT(true, l)
{"1": "a"}
SELECT JSON_OBJECT(CAST(true AS CHAR), l) FROM t1;
JSON_OBJECT(CAST(true AS CHAR), l)
{"1": "a"}
SELECT JSON_OBJECT(false, l) FROM t1;
JSON_OBJECT(false, l)
{"0": "a"}
SELECT JSON_OBJECT(CAST(false AS CHAR), l) FROM t1;
JSON_OBJECT(CAST(false AS CHAR), l)
{"0": "a"}
DROP TABLE t1;
DROP DATABASE json_object_db;

View File

@@ -0,0 +1,220 @@
DROP DATABASE IF EXISTS json_overlaps_db;
CREATE DATABASE json_overlaps_db;
USE json_overlaps_db;
# ----------------------------------------------------------------------
# Test of JSON_OVERLAPS function.
# ----------------------------------------------------------------------
# Comparing scalar
CREATE TABLE t1(l1 LONGTEXT, l2 LONGTEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
('true', 'true'),
('false', 'false'),
('1', '1'),
('"string1"', '"string1"'),
('null', 'null'),
('true', 'false'),
('1', '"1"'),
('1', '0'),
('null', '0'),
('"string1"', '"string2"'),
('true', '["abc", 1, 2, true, false]'),
('true', '["abc", 1, 2, [true]]'),
('true', '{"A":true}');
SELECT
l1,
l2,
JSON_OVERLAPS(l1, l2) AS is_overlaps
FROM
t1;
l1 l2 is_overlaps
true true 1
false false 1
1 1 1
"string1" "string1" 1
null null 1
true false 0
1 "1" 0
1 0 0
null 0 0
"string1" "string2" 0
true ["abc", 1, 2, true, false] 1
true ["abc", 1, 2, [true]] 0
true {"A":true} 0
# Testing non-scalar json data types
# Comparing object with object (non-nested)
CREATE TABLE t2(l1 LONGTEXT, l2 LONGTEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t2
VALUES
('{"A":[1, 2, 3]}', '{}'),
('{"A": 1}', '{"A": 1}'),
('{"A": 1}', '{"B": 1}'),
(
'{"A": 1, "B": "string1"}',
'{"A": 2,"B": "string1"}'
),
(
'{"A": 1,"B": "string1"}',
'{"A": 2,"B": "string2"}'
),
(
'{"A": 1,"B": {"C":2}}',
'{"A": 2,"B": {"C":1}}'
),
(
'{"A": 1,"B": {"C":2}}',
'{"A": 2,"B": {"C":2}}'
),
(
'{"A": {"B": true}}',
'{"A": {"B": true,"C": false}}'
),
(
'{"A":1, "B":{"D":4, "E":5}}',
'{"C":3, "B":{"E":5, "D":4}}'
),
(
'{"A":1, "B":{"D":4, "E":[5, 6, 7]}}',
'{"C":3, "B":{"E":5, "D":4}}'
),
(
'{"A":1, "B":{"D":4, "E":[5, 6, 7]}}',
'{"C":3, "B":{"E":[5, 6, 7], "D":4}}'
),
(
'{"A":1, "B":{"D":4, "E":[5, 6, 7]}}',
'{"C":3, "B":{"E":[7, 6 ,5], "D":4}}'
),
(
'{"A":1, "B":{"D":4, "E":[5, 6, 7]}}',
'{"C":3, "F":{"E":[5, 6, 7], "D":4}}'
),
('[1, 2, true, false, null]', '[3, 4, 1]'),
('[1, 2, true, false, null]', '[3, 4, 5]'),
('[1,2,3]', '[]'),
('[1, 2, true, false, null]', '[3, 4, [1]]'),
(
'[1, 2, [true, false], null]',
'[[1], [true, false]]'
),
('[1, 2, 3, [4, 5, 6]]', '[7, 8, 9, [6, 5, 4]]'),
('[1, 2, true, false, null]', '{"A": 1}'),
(
'[1, 2, true, false, null, {"A":2}]',
'{"A": 1}'
),
('[1, {"A": 2}, {"A": 1}]', '{"A": 1}'),
(
'[1, 2, true, false, {"A": 1, "B": 2}]',
'{"A": 1, "B": 2}'
),
(
'[1, 2, true, false, {"A": 1, "B": 2}]',
'{"A": 1, "B": 3}'
),
(
'[1, 2, true, false, [{"A": 1, "B": 2}]]',
'{"A": 1, "B": 2}'
),
(
'[1, 2, true, false, [{"A": 1, "B": 2}]]',
'{"A": 1, "B": 3}'
),
(
'[1, 2, true, false, [{"A": 1, "B": 2}]]',
'{"A": 1}'
),
(
'[1, 2, true, false, {"A": 1, "B": {"C": 12}}]',
'{"A": 1, "B": {"C": 12}}'
),
(
'[1, 2, true, false, [{"A": 1, "B": {"C": 12}}]]',
'{"A": 1, "B": {"C": 12}}'
),
(
'[1, 2, true, false, [{"A": 1, "B": {"C": 12}}]]',
'{"A": 1, "B":{"C": 12}}'
),
(
'[[1, 2, true, false, {"A": 1, "B": {"C": 12}}]]',
'{"A": 1, "B": {"C": 12}}'
),
(
'{"A": 1, "B": 3}',
'[1, 2, true, false, {"A": 1, "B": 2}]'
),
(
'{"A": 1, "B": 3}',
'[1, 2, true, false, {"A": 1, "B": 3}]'
),
(
'{"A": 1, "B": 3}',
'[1, 2, true, false, {"A": 1, "B": 2}, {"A": 1, "B": 3}]'
),
(
'{"A": 1, "B": [1, 2, 3]}',
'[1, 2, true, false, {"A": 1, "B": 2}, {"A": 1, "B": [1, 2, 3]}]'
),
(
'{"A": 1, "B": [1, 2, {"C": 3, "D": 5}]}',
'[1, 2, true, false, {"A": 1, "B": 2}, {"A":1, "B":[1, 2, {"C": 3, "D": 5}]}]'
),
(
'{"A": 1, "B": [1, 2, {"C": 3, "D": 5}]}',
'[1, 2, true, false, {"A": 1, "B": 2},{"A": 1, "B": [1, 2, {"C": 3, "D": 4}]}]'
),
(
'{"A": 1, "B": 3}',
'[1, 2, true, false, [{"A": 1, "B": 2}, {"A": 1, "B": 3}]]'
);
SELECT
l1,
l2,
JSON_OVERLAPS(l1, l2) AS is_overlaps
FROM
t2;
l1 l2 is_overlaps
{"A":[1, 2, 3]} {} 0
{"A": 1} {"A": 1} 1
{"A": 1} {"B": 1} 0
{"A": 1, "B": "string1"} {"A": 2,"B": "string1"} 1
{"A": 1,"B": "string1"} {"A": 2,"B": "string2"} 0
{"A": 1,"B": {"C":2}} {"A": 2,"B": {"C":1}} 0
{"A": 1,"B": {"C":2}} {"A": 2,"B": {"C":2}} 1
{"A": {"B": true}} {"A": {"B": true,"C": false}} 0
{"A":1, "B":{"D":4, "E":5}} {"C":3, "B":{"E":5, "D":4}} 1
{"A":1, "B":{"D":4, "E":[5, 6, 7]}} {"C":3, "B":{"E":5, "D":4}} 0
{"A":1, "B":{"D":4, "E":[5, 6, 7]}} {"C":3, "B":{"E":[5, 6, 7], "D":4}} 1
{"A":1, "B":{"D":4, "E":[5, 6, 7]}} {"C":3, "B":{"E":[7, 6 ,5], "D":4}} 0
{"A":1, "B":{"D":4, "E":[5, 6, 7]}} {"C":3, "F":{"E":[5, 6, 7], "D":4}} 0
[1, 2, true, false, null] [3, 4, 1] 1
[1, 2, true, false, null] [3, 4, 5] 0
[1,2,3] [] 0
[1, 2, true, false, null] [3, 4, [1]] 0
[1, 2, [true, false], null] [[1], [true, false]] 1
[1, 2, 3, [4, 5, 6]] [7, 8, 9, [6, 5, 4]] 0
[1, 2, true, false, null] {"A": 1} 0
[1, 2, true, false, null, {"A":2}] {"A": 1} 0
[1, {"A": 2}, {"A": 1}] {"A": 1} 1
[1, 2, true, false, {"A": 1, "B": 2}] {"A": 1, "B": 2} 1
[1, 2, true, false, {"A": 1, "B": 2}] {"A": 1, "B": 3} 0
[1, 2, true, false, [{"A": 1, "B": 2}]] {"A": 1, "B": 2} 0
[1, 2, true, false, [{"A": 1, "B": 2}]] {"A": 1, "B": 3} 0
[1, 2, true, false, [{"A": 1, "B": 2}]] {"A": 1} 0
[1, 2, true, false, {"A": 1, "B": {"C": 12}}] {"A": 1, "B": {"C": 12}} 1
[1, 2, true, false, [{"A": 1, "B": {"C": 12}}]] {"A": 1, "B": {"C": 12}} 0
[1, 2, true, false, [{"A": 1, "B": {"C": 12}}]] {"A": 1, "B":{"C": 12}} 0
[[1, 2, true, false, {"A": 1, "B": {"C": 12}}]] {"A": 1, "B": {"C": 12}} 0
{"A": 1, "B": 3} [1, 2, true, false, {"A": 1, "B": 2}] 0
{"A": 1, "B": 3} [1, 2, true, false, {"A": 1, "B": 3}] 1
{"A": 1, "B": 3} [1, 2, true, false, {"A": 1, "B": 2}, {"A": 1, "B": 3}] 1
{"A": 1, "B": [1, 2, 3]} [1, 2, true, false, {"A": 1, "B": 2}, {"A": 1, "B": [1, 2, 3]}] 1
{"A": 1, "B": [1, 2, {"C": 3, "D": 5}]} [1, 2, true, false, {"A": 1, "B": 2}, {"A":1, "B":[1, 2, {"C": 3, "D": 5}]}] 1
{"A": 1, "B": [1, 2, {"C": 3, "D": 5}]} [1, 2, true, false, {"A": 1, "B": 2},{"A": 1, "B": [1, 2, {"C": 3, "D": 4}]}] 0
{"A": 1, "B": 3} [1, 2, true, false, [{"A": 1, "B": 2}, {"A": 1, "B": 3}]] 0
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_overlaps_db;

View File

@@ -0,0 +1,168 @@
DROP DATABASE IF EXISTS json_quote_db;
CREATE DATABASE json_quote_db;
USE json_quote_db;
# ----------------------------------------------------------------------
# Test of JSON_QUOTE, JSON_UNQUOTE function.
# ----------------------------------------------------------------------
CREATE TABLE t1(l LONGTEXT) ENGINE = columnstore;
INSERT INTO
t1
VALUES
(NULL);
SELECT
JSON_QUOTE(l, NULL)
FROM
t1;
ERROR 42000: Incorrect parameter count in the call to native function 'JSON_QUOTE'
SELECT
JSON_QUOTE(l, 'bar')
FROM
t1;
ERROR 42000: Incorrect parameter count in the call to native function 'JSON_QUOTE'
SELECT
JSON_UNQUOTE(l, NULL)
FROM
t1;
ERROR 42000: Incorrect parameter count in the call to native function 'JSON_UNQUOTE'
SELECT
JSON_UNQUOTE(l, 'bar')
FROM
t1;
ERROR 42000: Incorrect parameter count in the call to native function 'JSON_UNQUOTE'
# Null arg
SELECT
JSON_QUOTE(l)
FROM
t1;
JSON_QUOTE(l)
NULL
SELECT
JSON_UNQUOTE(l)
FROM
t1;
JSON_UNQUOTE(l)
NULL
# Calling based on encodings
TRUNCATE t1;
INSERT INTO
t1
VALUES
('abc');
SELECT
l AS raw,
JSON_QUOTE(CONVERT(l USING ascii)) AS quote_ascii,
JSON_QUOTE(CONVERT(l USING latin1)) AS quote_latin1,
JSON_QUOTE(CONVERT(l USING utf8)) AS quote_utf8,
JSON_QUOTE(CONVERT(l USING utf8mb4)) AS quote_utf8mb4
FROM
t1;
raw quote_ascii quote_latin1 quote_utf8 quote_utf8mb4
abc "abc" "abc" "abc" "abc"
# Chinese characters (render correctly in the console, but not in the test output)
CREATE TABLE t2(l VARCHAR(50)) ENGINE = columnstore;
INSERT INTO
t2
VALUES
(X'e68891');
SELECT
*
FROM
t2;
l
SET
NAMES 'utf8';
# All should be the Chinese character for "I", i.e. 我
SELECT
JSON_QUOTE(CONVERT(l USING utf8)) AS quote_utf8,
JSON_COMPACT(JSON_QUOTE(CONVERT(l USING utf8))) AS compact_quote_utf8,
JSON_QUOTE(CONVERT(l USING utf8mb4)) AS quote_utf8mb4,
JSON_UNQUOTE(CONVERT(l USING utf8)) AS unquote_utf8
FROM
t2;
quote_utf8 compact_quote_utf8 quote_utf8mb4 unquote_utf8
"我" "我" "我" 我
# Do nothing
TRUNCATE t1;
INSERT INTO
t1
VALUES
('"');
SELECT
JSON_QUOTE(l),
JSON_UNQUOTE(l)
FROM
t1;
JSON_QUOTE(l) JSON_UNQUOTE(l)
"\"" "
TRUNCATE t1;
INSERT INTO
t1
VALUES
('""');
SELECT
JSON_UNQUOTE(l),
CHAR_LENGTH(JSON_UNQUOTE(l))
FROM
t1;
JSON_UNQUOTE(l) CHAR_LENGTH(JSON_UNQUOTE(l))
NULL 0
TRUNCATE t1;
INSERT INTO
t1
VALUES
('"" ');
SELECT
JSON_UNQUOTE(l)
FROM
t1;
JSON_UNQUOTE(l)
NULL
# Incorrect type, e.g. Integer
CREATE TABLE t3(i INT) ENGINE = columnstore;
INSERT INTO
t3
VALUES
(123);
SELECT
JSON_QUOTE(i)
FROM
t3;
JSON_QUOTE(i)
NULL
SELECT
JSON_UNQUOTE(i)
FROM
t3;
JSON_UNQUOTE(i)
123
# Round trip
TRUNCATE t1;
INSERT INTO
t1
VALUES
('abc');
SELECT
JSON_UNQUOTE(JSON_COMPACT(JSON_QUOTE(l)))
FROM
t1;
JSON_UNQUOTE(JSON_COMPACT(JSON_QUOTE(l)))
abc
SELECT
JSON_UNQUOTE(
JSON_UNQUOTE(
JSON_UNQUOTE(JSON_QUOTE(JSON_QUOTE(JSON_QUOTE(l))))
)
)
FROM
t1;
JSON_UNQUOTE(
JSON_UNQUOTE(
JSON_UNQUOTE(JSON_QUOTE(JSON_QUOTE(JSON_QUOTE(l))))
)
)
abc
DROP TABLE t1;
# DROP TABLE t2;
DROP TABLE t3;
DROP DATABASE json_quote_db;

View File

@@ -0,0 +1,49 @@
DROP DATABASE IF EXISTS json_remove_db;
CREATE DATABASE json_remove_db;
USE json_remove_db;
# ----------------------------------------------------------------------
# Test of JSON_REMOVE function.
# ----------------------------------------------------------------------
CREATE TABLE t1(j TEXT, p TEXT) ENGINE = columnstore;
INSERT INTO
t1
VALUES
('["a", ["b", "c"], "d"]', '$[0]'),
('["a", ["b", "c"], "d"]', '$[1]'),
('["a", ["b", "c"], "d"]', '$[1][0]'),
('["a", ["b", "c"], "d"]', '$[0]');
SELECT
j AS json,
p AS path,
JSON_REMOVE(j, p) AS result
FROM
t1;
json path result
["a", ["b", "c"], "d"] $[0] [["b", "c"], "d"]
["a", ["b", "c"], "d"] $[1] ["a", "d"]
["a", ["b", "c"], "d"] $[1][0] ["a", ["c"], "d"]
["a", ["b", "c"], "d"] $[0] [["b", "c"], "d"]
CREATE TABLE t2(j TEXT, p TEXT) ENGINE = columnstore;
INSERT INTO
t2
VALUES
('{"a": 1, "b": [2, 3]}', '$.a'),
('{"a": 1, "b": [2, 3]}', '$.a[0]'),
('{"a": 1, "b": [2, 3]}', '$.b'),
('{"a": 1, "b": [2, 3]}', '$.b[0]'),
('{"a": 1, "b": [2, 3]}', '$.b[1]');
SELECT
j AS json,
p AS path,
JSON_REMOVE(j, p) AS result
FROM
t2;
json path result
{"a": 1, "b": [2, 3]} $.a {"b": [2, 3]}
{"a": 1, "b": [2, 3]} $.a[0] {"a": 1, "b": [2, 3]}
{"a": 1, "b": [2, 3]} $.b {"a": 1}
{"a": 1, "b": [2, 3]} $.b[0] {"a": 1, "b": [3]}
{"a": 1, "b": [2, 3]} $.b[1] {"a": 1, "b": [2]}
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_remove_db;

View File

@@ -0,0 +1,121 @@
DROP DATABASE IF EXISTS json_search_db;
CREATE DATABASE json_search_db;
USE json_search_db;
# ----------------------------------------------------------------------
# Test of JSON_SEARCH function.
# ----------------------------------------------------------------------
# JSON_SEARCH with single path expression
CREATE TABLE t1(j TEXT, f TEXT, v TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
(
'["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}]',
'one',
'abc'
),
(
'["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}]',
'all',
'abc'
),
('{"x": "\\""}', "one", '"'),
('{"x": "\\""}', "one", '\\"');
SELECT
j AS json,
f AS return_arg,
v AS search_str,
j AS json, f AS return_arg, JSON_SEARCH(j, f, v) AS result
FROM
t1;
json return_arg search_str json return_arg result
["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] one abc ["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] one "$[0]"
["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] all abc ["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] all ["$[0]", "$[2].x"]
{"x": "\""} one " {"x": "\""} one "$.x"
{"x": "\""} one \" {"x": "\""} one "$.x"
# JSON_SEARCH with path expression
CREATE TABLE t2(j TEXT, f TEXT, v TEXT, e TEXT, p TEXT) ENGINE = COLUMNSTORE;
SET
@j = '["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}]';
INSERT INTO
t2
VALUES
(@j, 'all', 'abc', NULL, '$[0]'),
(@j, 'all', 'abc', NULL, '$[2]'),
(@j, 'all', '10', NULL, '$[1]'),
(@j, 'all', '10', NULL, '$[2]');
SELECT
j AS json,
f AS return_arg,
v AS search_str,
p AS path,
JSON_SEARCH(j, f, v, NULL, p) AS result
FROM
t2;
json return_arg search_str path result
["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] all abc $[0] "$[0]"
["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] all abc $[2] "$[2].x"
["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] all 10 $[1] "$[1][0].k"
["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] all 10 $[2] NULL
# JSON_SEARCH with escape char
CREATE TABLE t3(j TEXT, f TEXT) ENGINE = COLUMNSTORE;
set @json = '[ "footbar", "foo%bar" ]';
INSERT INTO t3 VALUES(@json,'all');
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo%bar' ) FROM t3;
json return_arg JSON_SEARCH(j, f, 'foo%bar' )
[ "footbar", "foo%bar" ] all ["$[0]", "$[1]"]
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo\%bar' ) FROM t3;
json return_arg JSON_SEARCH(j, f, 'foo\%bar' )
[ "footbar", "foo%bar" ] all "$[1]"
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo|%bar', '|' ) FROM t3;
json return_arg JSON_SEARCH(j, f, 'foo|%bar', '|' )
[ "footbar", "foo%bar" ] all "$[1]"
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo|%bar', '|', '$[0]' ) FROM t3;
json return_arg JSON_SEARCH(j, f, 'foo|%bar', '|', '$[0]' )
[ "footbar", "foo%bar" ] all NULL
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo|%bar', '|', '$[0]', '$[1]' ) FROM t3;
json return_arg JSON_SEARCH(j, f, 'foo|%bar', '|', '$[0]', '$[1]' )
[ "footbar", "foo%bar" ] all "$[1]"
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo|%bar', '|', '$[0]', '$[1]', '$[2]' ) FROM t3;
json return_arg JSON_SEARCH(j, f, 'foo|%bar', '|', '$[0]', '$[1]', '$[2]' )
[ "footbar", "foo%bar" ] all "$[1]"
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo\%bar', null ) FROM t3;
json return_arg JSON_SEARCH(j, f, 'foo\%bar', null )
[ "footbar", "foo%bar" ] all "$[1]"
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo\%bar', null, '$[0]' ) FROM t3;
json return_arg JSON_SEARCH(j, f, 'foo\%bar', null, '$[0]' )
[ "footbar", "foo%bar" ] all NULL
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo\%bar', null, '$[1]' ) FROM t3;
json return_arg JSON_SEARCH(j, f, 'foo\%bar', null, '$[1]' )
[ "footbar", "foo%bar" ] all "$[1]"
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo|%bar', '|', '$[0]' ) FROM t3;
json return_arg JSON_SEARCH(j, f, 'foo|%bar', '|', '$[0]' )
[ "footbar", "foo%bar" ] all NULL
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo|%bar', '|', '$[1]' ) FROM t3;
json return_arg JSON_SEARCH(j, f, 'foo|%bar', '|', '$[1]' )
[ "footbar", "foo%bar" ] all "$[1]"
# JSON_SEARCH and case sensitivity
CREATE TABLE t4(j TEXT, f TEXT) ENGINE = COLUMNSTORE;
INSERT INTO t4 VALUES('["abc", "ABC"]', 'all');
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'aBC') FROM t4;
json return_arg JSON_SEARCH(j, f, 'aBC')
["abc", "ABC"] all ["$[0]", "$[1]"]
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'abc') FROM t4;
json return_arg JSON_SEARCH(j, f, 'abc')
["abc", "ABC"] all ["$[0]", "$[1]"]
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'ABC') FROM t4;
json return_arg JSON_SEARCH(j, f, 'ABC')
["abc", "ABC"] all ["$[0]", "$[1]"]
drop TABLE t4;
drop TABLE t3;
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_search_db;

View File

@@ -0,0 +1,49 @@
DROP DATABASE IF EXISTS json_type_db;
CREATE DATABASE json_type_db;
USE json_type_db;
# ----------------------------------------------------------------------
# Test of JSON_TYPE function.
# ----------------------------------------------------------------------
CREATE TABLE t1(l LONGTEXT) ENGINE = COLUMNSTORE;
# Error ER_INVALID_JSON_TEXT_IN_PARAM
INSERT INTO
t1
VALUES
('abc');
SELECT
JSON_TYPE(l)
FROM
t1;
JSON_TYPE(l)
NULL
# String literal - valid JSON
TRUNCATE t1;
INSERT INTO
t1
VALUES
('{"a": 2}'),
('[1,2]'),
('"scalar string"'),
('true'),
('false'),
('null'),
('1'),
('-0'),
('-0.0');
SELECT
l AS json,
JSON_TYPE(l) AS TYPE
FROM
t1;
json TYPE
{"a": 2} OBJECT
[1,2] ARRAY
"scalar string" STRING
true BOOLEAN
false BOOLEAN
null NULL
1 INTEGER
-0 INTEGER
-0.0 DOUBLE
DROP TABLE t1;
DROP DATABASE json_type_db;

View File

@@ -0,0 +1,191 @@
DROP DATABASE IF EXISTS json_valid_db;
CREATE DATABASE json_valid_db;
USE json_valid_db;
# ----------------------------------------------------------------------
# Test of JSON_VALID function.
# ----------------------------------------------------------------------
#
# String literal - valid JSON
#
CREATE TABLE t1(l LONGTEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
('123'),
('-123'),
('5000000000'),
('-5000000000'),
('1.23'),
('"123"'),
('true'),
('false'),
('null'),
('{"address": "Trondheim"}'),
(JSON_OBJECT()),
(JSON_OBJECT(1, 2)),
(JSON_ARRAY()),
(JSON_ARRAY(1, 2));
SELECT
l AS raw,
JSON_VALID(l) AS is_valid,
JSON_VALID(JSON_COMPACT(l)) AS compact
FROM
t1;
raw is_valid compact
123 1 1
-123 1 1
5000000000 1 1
-5000000000 1 1
1.23 1 1
"123" 1 1
true 1 1
false 1 1
null 1 1
{"address": "Trondheim"} 1 1
{} 1 1
{"1": 2} 1 1
[] 1 1
[1, 2] 1 1
#
# String literal - invalid JSON
#
TRUNCATE t1;
INSERT INTO
t1
VALUES
('12 3'),
('{key:value}'),
('{key:value'),
('[1,2,]'),
('[1,2');
SELECT
l AS raw,
JSON_VALID(l) AS is_valid,
JSON_VALID(JSON_COMPACT(l)) AS compact
FROM
t1;
raw is_valid compact
12 3 0 NULL
{key:value} 0 NULL
{key:value 0 NULL
[1,2,] 0 NULL
[1,2 0 NULL
#
# String literal - not in UTF-8
#
TRUNCATE t1;
SET
NAMES 'ascii';
INSERT INTO
t1
VALUES
('123');
SELECT
l AS raw,
JSON_VALID(l) AS is_valid,
JSON_VALID(JSON_COMPACT(l)) AS compact
FROM
t1;
raw is_valid compact
123 1 1
SET
NAMES 'utf8';
#
# Bare NULL
#
TRUNCATE t1;
INSERT INTO
t1
VALUES
(NULL);
SELECT
JSON_VALID(l)
FROM
t1;
JSON_VALID(l)
NULL
#
# Function result - string
#
TRUNCATE t1;
INSERT INTO
t1
VALUES
(UPPER('"abc"'));
SELECT
JSON_VALID(l)
FROM
t1;
JSON_VALID(l)
1
#
# Function result - string not in UTF-8
#
TRUNCATE t1;
SET
NAMES 'latin1';
INSERT INTO
t1
VALUES
(UPPER('"abc"'));
SELECT
JSON_VALID(l)
FROM
t1;
JSON_VALID(l)
1
SET
NAMES 'utf8';
#
# Function result - date, not valid as JSON without CAST
#
TRUNCATE t1;
INSERT INTO
t1
VALUES
(CAST('2015-01-15' AS DATE));
SELECT
JSON_VALID(l)
FROM
t1;
JSON_VALID(l)
0
#
# The date string doesn't parse as JSON text, so wrong:
#
TRUNCATE t1;
INSERT INTO
t1
VALUES
(
CAST(
CAST('2015-01-15' AS DATE) AS CHAR CHARACTER SET 'utf8'
)
);
SELECT
JSON_VALID(l)
FROM
t1;
JSON_VALID(l)
0
#
# Function result - NULL
#
TRUNCATE t1;
INSERT INTO
t1
VALUES
(UPPER(NULL));
INSERT INTO
t1
VALUES
(UPPER(CAST(NULL AS CHAR)));
SELECT
JSON_VALID(l)
FROM
t1;
JSON_VALID(l)
NULL
NULL
DROP TABLE t1;
DROP DATABASE json_valid_db;

View File

@@ -0,0 +1,51 @@
DROP DATABASE IF EXISTS json_value_db;
CREATE DATABASE json_value_db;
USE json_value_db;
# ----------------------------------------------------------------------
# Test of JSON_VALUE function.
# ----------------------------------------------------------------------
CREATE TABLE t1(s TEXT, p TEXT) ENGINE = columnstore;
INSERT INTO t1 VALUES('{"key1":123}', '$.key2'),
('{"key1":123}', '$.key1'),
('{"key1":[1,2,3]}', '$.key1'),
('{"key1": [1,2,3], "key1":123}', '$.key1'),
('{ "x": [0,1], "y": "[0,1]", "z": "Mon\\\"t\\\"y" }','$.z'),
('{"\\"key1":123}', '$."\\"key1"'),
('{"\\"key1\\"":123}', '$."\\"key1\\""'),
('{"key 1":123}', '$."key 1"');
SELECT s as json_text, p as path, JSON_VALUE(s, p) as json_value, JSON_QUERY(s, p) as json_query
FROM t1;
json_text path json_value json_query
{"key1":123} $.key2 NULL NULL
{"key1":123} $.key1 123 NULL
{"key1":[1,2,3]} $.key1 NULL [1,2,3]
{"key1": [1,2,3], "key1":123} $.key1 123 [1,2,3]
{ "x": [0,1], "y": "[0,1]", "z": "Mon\"t\"y" } $.z Mon"t"y NULL
{"\"key1":123} $."\"key1" 123 NULL
{"\"key1\"":123} $."\"key1\"" 123 NULL
{"key 1":123} $."key 1" 123 NULL
CREATE TABLE t2(s TEXT) ENGINE = columnstore;
INSERT INTO t2 VALUES('{"key1":123, "key2":{"key3":"value3"}}'),
('{"key1":123, "key3":[1,2,3]}'),
('{"key1":123, "key2":"[1]"}');
SELECT s as json_text, '$.key1' , JSON_VALUE(s, '$.key1') as json_value, JSON_QUERY(s, '$.key1') as json_query
FROM t2;
json_text $.key1 json_value json_query
{"key1":123, "key2":{"key3":"value3"}} $.key1 123 NULL
{"key1":123, "key3":[1,2,3]} $.key1 123 NULL
{"key1":123, "key2":"[1]"} $.key1 123 NULL
SELECT s as json_text, '$.key2' , JSON_VALUE(s, '$.key2') as json_value, JSON_QUERY(s, '$.key2') as json_query
FROM t2;
json_text $.key2 json_value json_query
{"key1":123, "key2":{"key3":"value3"}} $.key2 NULL {"key3":"value3"}
{"key1":123, "key3":[1,2,3]} $.key2 NULL NULL
{"key1":123, "key2":"[1]"} $.key2 [1] NULL
SELECT s as json_text, '$.key3' , JSON_VALUE(s, '$.key3') as json_value, JSON_QUERY(s, '$.key3') as json_query
FROM t2;
json_text $.key3 json_value json_query
{"key1":123, "key2":{"key3":"value3"}} $.key3 NULL NULL
{"key1":123, "key3":[1,2,3]} $.key3 NULL [1,2,3]
{"key1":123, "key2":"[1]"} $.key3 NULL NULL
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_value_db;

View File

@ -0,0 +1,197 @@
DROP DATABASE IF EXISTS json_range_expr_db;
CREATE DATABASE json_range_expr_db;
USE json_range_expr_db;
# Range expression is supported in MariaDB 10.9 binaries
CREATE TABLE t2(j TEXT, p TEXT) ENGINE = columnstore;
SET
@json = '{
"A": [0,
[1, 2, 3],
[4, 5, 6],
"seven",
0.8,
true,
false,
"eleven",
[12, 13, {"key1":"value1"},[15]],
true],
"B": {"C": 1},
"D": 2
}';
INSERT INTO
t2
VALUES
(@json, '$.A[-2][-1]'),
(@json, '$.A[last-1][last]');
SELECT
j,
p,
JSON_EXISTS(j, p) AS result
FROM
t2;
j p result
{
"A": [0,
[1, 2, 3],
[4, 5, 6],
"seven",
0.8,
true,
false,
"eleven",
[12, 13, {"key1":"value1"},[15]],
true],
"B": {"C": 1},
"D": 2
} $.A[-2][-1] 1
{
"A": [0,
[1, 2, 3],
[4, 5, 6],
"seven",
0.8,
true,
false,
"eleven",
[12, 13, {"key1":"value1"},[15]],
true],
"B": {"C": 1},
"D": 2
} $.A[last-1][last] 1
# Test case 2
CREATE TABLE t3(j TEXT, p TEXT) ENGINE = columnstore;
SET
@json = '[
[1, {"key1": "value1"}, 3],
[false, 5, 6],
[7, 8, [9, {"key2": 2}, 11]],
[15, 1.34, [14], ["string1", [16, {"key1":[1,2,3,[4,5,6]]}, 18]]],
[19, 20],
21, 22
]';
INSERT INTO
t3
VALUES
(@json, '$[3][3][-2 to last]');
SELECT
j,
p,
JSON_EXISTS(j, p) AS result
FROM
t3;
j p result
[
[1, {"key1": "value1"}, 3],
[false, 5, 6],
[7, 8, [9, {"key2": 2}, 11]],
[15, 1.34, [14], ["string1", [16, {"key1":[1,2,3,[4,5,6]]}, 18]]],
[19, 20],
21, 22
] $[3][3][-2 to last] 1
# Test case 3
CREATE TABLE t4(j TEXT, p TEXT) ENGINE = columnstore;
SET
@json = '[
[1, {"key1": "value1"}, 3],
[false, 5, 6],
[7, 8, [9, {"key2": 2}, 11]],
[15, 1.34, [14], ["string1", [16, {"key1":[1,2,3,[4,5,6]]}, 18]]],
[19, 20],
21, 22
]';
INSERT INTO
t4
VALUES
(@json, '$[2][2][1 to 2]'),
(@json, '$[2][2][4 to 6]'),
(@json, '$[2][2][1 to 4]');
SELECT
j,
p,
JSON_EXISTS(j, p) AS result
FROM
t4;
j p result
[
[1, {"key1": "value1"}, 3],
[false, 5, 6],
[7, 8, [9, {"key2": 2}, 11]],
[15, 1.34, [14], ["string1", [16, {"key1":[1,2,3,[4,5,6]]}, 18]]],
[19, 20],
21, 22
] $[2][2][1 to 2] 1
[
[1, {"key1": "value1"}, 3],
[false, 5, 6],
[7, 8, [9, {"key2": 2}, 11]],
[15, 1.34, [14], ["string1", [16, {"key1":[1,2,3,[4,5,6]]}, 18]]],
[19, 20],
21, 22
] $[2][2][4 to 6] 0
[
[1, {"key1": "value1"}, 3],
[false, 5, 6],
[7, 8, [9, {"key2": 2}, 11]],
[15, 1.34, [14], ["string1", [16, {"key1":[1,2,3,[4,5,6]]}, 18]]],
[19, 20],
21, 22
] $[2][2][1 to 4] 1
# JSON_EXTRACT
CREATE TABLE t5(j LONGTEXT, p LONGTEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t5
VALUES
('[1, "val2", [3.1, -4]]', '$'),
('1', '$'),
('[10, 20, [30, 40]]', '$[2][*]'),
('[10, 20, [{"a":3}, 30, 40]]', '$[2][*]'),
(json_object('foo', 'foobar'), '$');
SELECT
j,
p,
JSON_EXTRACT(j, p) AS result
FROM
t5;
j p result
[1, "val2", [3.1, -4]] $ [1, "val2", [3.1, -4]]
1 $ 1
[10, 20, [30, 40]] $[2][*] [30, 40]
[10, 20, [{"a":3}, 30, 40]] $[2][*] [{"a": 3}, 30, 40]
{"foo": "foobar"} $ {"foo": "foobar"}
# JSON_SEARCH
CREATE TABLE t6(j TEXT, f TEXT, v TEXT, e TEXT, p TEXT) ENGINE = COLUMNSTORE;
SET
@j = '["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}]';
INSERT INTO
t6
VALUES
(@j, 'all', 'abc', NULL, '$'),
(@j, 'all', '10', NULL, '$'),
(@j, 'all', '10', NULL, '$[*]'),
(@j, 'all', '10', NULL, '$[*][0].k'),
(@j, 'all', '10', NULL, '$**.k');
SELECT
j AS json,
f AS return_arg,
v AS search_str,
e AS escape_char,
p AS path,
JSON_SEARCH(j, f, v, NULL, p) AS result
FROM
t6;
json return_arg search_str escape_char path result
["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] all abc NULL $ ["$[0]", "$[2].x"]
["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] all 10 NULL $ "$[1][0].k"
["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] all 10 NULL $[*] "$[1][0].k"
["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] all 10 NULL $[*][0].k "$[1][0].k"
["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}] all 10 NULL $**.k "$[1][0].k"
DROP TABLE t6;
DROP TABLE t5;
DROP TABLE t4;
DROP TABLE t3;
DROP TABLE t2;
DROP DATABASE json_range_expr_db;

View File

@ -0,0 +1,21 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_array_db;
--enable_warnings
CREATE DATABASE json_array_db;
USE json_array_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_ARRAY function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(l text) ENGINE = COLUMNSTORE;
INSERT INTO t1 VALUES('1');
SELECT JSON_ARRAY() FROM t1;
SELECT JSON_ARRAY(l) FROM t1;
SELECT JSON_ARRAY(l, null, 'My name is "Foo"', 3.1415, 6) FROM t1;
SELECT JSON_ARRAY(l, true, false, "true", "false") FROM t1;
SELECT JSON_ARRAY(l, '{"key1":"value1"}') FROM t1;
SELECT JSON_ARRAY(l, JSON_COMPACT( '{"key1":"value1"}' ) ) FROM t1;
DROP TABLE t1;
DROP DATABASE json_array_db;

View File

@ -0,0 +1,97 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_array_append_db;
--enable_warnings
CREATE DATABASE json_array_append_db;
USE json_array_append_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_ARRAY_APPEND function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(a TEXT, p TEXT, v TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
('[1,2,3]', '$[0]', 2),
('[1,2,3]', '$[0]', 1.2),
('[1,2,3]', '$[0]', 'key1'),
('[1,2,3]', '$[0]', TRUE),
('[1,2,3]', '$[0]', false),
('[1,2,3]', '$[0]', NULL),
(
'{"a": "foo", "b": "bar", "c": "wibble" }',
'$.b',
4
),
(
'{"a": "foo", "b": "bar", "c": "wibble" }',
'$.c',
'grape'
),
(
'{"a": "foo", "b": [1,2,3], "c": ["apple","pear"]}',
'$.b',
4
),
(
'{"a": "foo", "b": [1,2,3], "c": ["apple","pear"]}',
'$.c',
'grape'
);
SELECT
a AS array,
p AS path,
v AS value,
JSON_ARRAY_APPEND(a, p, v) AS result
FROM
t1;
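# The object rows above exercise appending to a path whose current value is a
# scalar; as in the implementation further below, the existing value is first
# wrapped in an array. A hedged sketch, not part of the recorded result:
# JSON_ARRAY_APPEND('{"b": "bar"}', '$.b', 4)  -- expected to give {"b": ["bar", 4]}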
--echo # NULL args
CREATE TABLE t2(a TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t2
VALUES
('[1,2,3]');
SELECT
JSON_ARRAY_APPEND(a, NULL, JSON_COMPACT(1)),
JSON_ARRAY_APPEND(a, '$', NULL)
FROM
t2;
TRUNCATE t2;
INSERT INTO
t2
VALUES
('$.b');
SELECT
JSON_ARRAY_APPEND(NULL, a, JSON_COMPACT(1)),
JSON_ARRAY_APPEND('[1,2,3]', a, NULL)
FROM
t2;
TRUNCATE t2;
INSERT INTO
t2
VALUES
('$**[0]');
--echo error ER_INVALID_JSON_PATH_WILDCARD
SELECT
JSON_ARRAY_APPEND(JSON_COMPACT('{"a": {"b": [3]}}'), a, 6)
FROM
t2;
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_array_append_db;

View File

@ -0,0 +1,97 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_array_insert_db;
--enable_warnings
CREATE DATABASE json_array_insert_db;
USE json_array_insert_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_ARRAY_INSERT function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(a TEXT, p TEXT, v TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
('[1,2,3]', '$[0]', 2),
('[1,2,3]', '$[0]', 1.2),
('[1,2,3]', '$[0]', 'key1'),
('[1,2,3]', '$[0]', TRUE),
('[1,2,3]', '$[0]', false),
('[1,2,3]', '$[0]', NULL),
(
'{"a": "foo", "b": "bar", "c": "wibble" }',
'$.b',
4
),
(
'{"a": "foo", "b": "bar", "c": "wibble" }',
'$.c',
'grape'
),
(
'{"a": "foo", "b": [1,2,3], "c": ["apple","pear"]}',
'$.b',
4
),
(
'{"a": "foo", "b": [1,2,3], "c": ["apple","pear"]}',
'$.c',
'grape'
);
SELECT
a AS array,
p AS path,
v AS value,
JSON_ARRAY_INSERT(a, p, v) AS result
FROM
t1;
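# JSON_ARRAY_INSERT expects the path to end in an array position (e.g. '$[0]');
# rows above whose path ends in an object member are expected to yield NULL, as
# the SHOULD_END_WITH_ARRAY check in the implementation suggests. A hedged
# sketch, not part of the recorded result:
# JSON_ARRAY_INSERT('[1, [2, 3], 4]', '$[1][0]', 'x')  -- expected [1, ["x", 2, 3], 4]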
--echo # NULL args
CREATE TABLE t2(a TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t2
VALUES
('[1,2,3]');
SELECT
JSON_ARRAY_INSERT(a, NULL, JSON_COMPACT(1)),
JSON_ARRAY_INSERT(a, '$', NULL)
FROM
t2;
TRUNCATE t2;
INSERT INTO
t2
VALUES
('$.b');
SELECT
JSON_ARRAY_INSERT(NULL, a, JSON_COMPACT(1)),
JSON_ARRAY_INSERT('[1,2,3]', a, NULL)
FROM
t2;
TRUNCATE t2;
INSERT INTO
t2
VALUES
('$**[0]');
--echo error ER_INVALID_JSON_PATH_WILDCARD
SELECT
JSON_ARRAY_INSERT(JSON_COMPACT('{"a": {"b": [3]}}'), a, 6)
FROM
t2;
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_array_insert_db;

View File

@ -0,0 +1,70 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_contains_db;
--enable_warnings
CREATE DATABASE json_contains_db;
USE json_contains_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_CONTAINS function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(j LONGTEXT, v LONGTEXT, p LONGTEXT) ENGINE = columnstore;
INSERT INTO
t1
VALUES
('{"k1":123, "k2":345}', '123', '$.k1'),
('', '', '$'),
('null', 'null', '$'),
('"10"', '"10"', '$'),
('"10"', '10', '$'),
('10.1', '10', '$'),
('10.0', '10', '$');
SELECT
j AS json,
v AS value,
p AS path,
JSON_CONTAINS(j, v, p) AS result
FROM
t1;
CREATE TABLE t2(j LONGTEXT, v LONGTEXT) ENGINE = columnstore;
INSERT INTO
t2
VALUES
('"you"', '"you"'),
('"youth"', '"you"'),
('[1]', '1'),
('[2, 1]', '1'),
('[2, [2, 3], 1]', '1'),
('[4, [2, 3], 1]', '2'),
('[2, 1]', '[1, 2]'),
('[2, 1]', '[1, 0, 2]'),
('[2, 0, 3, 1]', '[1, 2]'),
('{"b":[1,2], "a":1}', '{"a":1, "b":2}'),
('{"a":1}', '{}'),
('[1, {"a":1}]', '{}'),
('[1, {"a":1}]', '{"a":1}'),
('[{"abc":"def", "def":"abc"}]', '["foo","bar"]'),
(
'[{"abc":"def", "def":"abc"}, "bar"]',
'["bar", {}]'
),
('[{"a":"b"},{"c":"d"}]', '{"c":"d"}');
SELECT
j AS json,
v AS value,
JSON_CONTAINS(j, v) AS result
FROM
t2;
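# Containment is checked element-wise for arrays and key/value-wise for objects
# (see checkContains() in the implementation further below). A hedged sketch,
# not part of the recorded result:
# JSON_CONTAINS('[2, 0, 3, 1]', '[1, 2]')         -- expected 1: both 1 and 2 are present
# JSON_CONTAINS('{"b":[1,2], "a":1}', '{"a":1}')  -- expected 1: the candidate pair matches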
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_contains_db;

View File

@ -0,0 +1,71 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_contains_path_db;
--enable_warnings
CREATE DATABASE json_contains_path_db;
USE json_contains_path_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_CONTAINS_PATH function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(j TEXT, r TEXT, p TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
('{"key1":1, "key2":[2,3]}', "oNE", "$.key2[1]"),
('{"key1":1, "key2":[2,3]}', "oNE", "$.key2[10]"),
('{"key1":1, "key2":[2,3]}', "oNE", "$.ma"),
('{"key1":1, "key2":[2,3]}', "one", "$.key1"),
('{ "a": true }', NULL, '$.a'),
('{ "a": true }', 'all', NULL),
('{"a":{"b":"c"}}', 'one', '$.a.*');
SELECT
j AS json,
r AS return_flag,
p AS path,
JSON_CONTAINS_PATH(j, r, p) AS result
FROM
t1;
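# The return flag is case-insensitive: 'one' asks whether any listed path
# exists, 'all' whether every path exists. A hedged sketch, not part of the
# recorded result:
# JSON_CONTAINS_PATH('{"a":1, "b":2}', 'one', '$.a', '$.c')  -- expected 1
# JSON_CONTAINS_PATH('{"a":1, "b":2}', 'all', '$.a', '$.c')  -- expected 0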
CREATE TABLE t2(j TEXT, r TEXT, p1 TEXT, p2 TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t2
VALUES
(
'{"key1":1, "key2":[2,3]}',
"one",
"$.key1",
"$.ma"
),
(
'{"key1":1, "key2":[2,3]}',
"aLl",
"$.key1",
"$.ma"
),
(
'{"key1":1, "key2":[2,3]}',
"aLl",
"$.key1",
"$.key2"
);
SELECT
j AS json,
r AS return_flag,
p1 AS path,
p2 AS path,
JSON_CONTAINS_PATH(j, r, p1, p2) AS result
FROM
t2;
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_contains_path_db;

View File

@ -0,0 +1,45 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_depth_db;
--enable_warnings
CREATE DATABASE json_depth_db;
USE json_depth_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_DEPTH function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(l LONGTEXT) ENGINE = columnstore;
--echo # Return NULL
INSERT INTO t1 VALUES(NULL);
SELECT JSON_DEPTH(l) FROM t1;
--echo # Return 1
TRUNCATE t1;
INSERT INTO t1 VALUES('{}');
INSERT INTO t1 VALUES('[]');
INSERT INTO t1 VALUES('"abc"');
SELECT JSON_DEPTH(l) FROM t1;
--echo # Error ER_INVALID_TYPE_FOR_JSON
CREATE TABLE t2(i int) ENGINE=columnstore;
INSERT INTO t2 VALUES(1);
SELECT JSON_DEPTH(i) FROM t2;
DROP TABLE t2;
--echo # Error ER_INVALID_JSON_TEXT_IN_PARAM
TRUNCATE t1;
INSERT INTO t1 VALUES('abc');
INSERT INTO t1 VALUES('[ "a", true, "b" , { "e" : false }, "c" , null');
SELECT JSON_DEPTH(l) FROM t1;
--echo # Return 2
TRUNCATE t1;
INSERT INTO t1 VALUES('{ "a" : true, "b" : false, "c" : null }');
INSERT INTO t1 VALUES('[ "a", true, "b" , false, "c" , null ]');
INSERT INTO t1 VALUES('{ "a" : true, "b" : {}, "c" : null }');
INSERT INTO t1 VALUES('[ "a", true, "b" , {}, "c" , null ]');
SELECT JSON_DEPTH(l) FROM t1;
--echo # Return 3
TRUNCATE t1;
INSERT INTO t1 VALUES('{ "a" : true, "b" : { "e" : false }, "c" : null }');
SELECT JSON_DEPTH(l) FROM t1;
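# Depth counts nesting levels: a scalar or empty container is 1 and each level
# of nesting adds one. A hedged sketch, not part of the recorded result:
# JSON_DEPTH('[10, {"a": 20}]')  -- expected 3: array -> object -> scalar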
DROP TABLE t1;
DROP DATABASE json_depth_db;

View File

@ -0,0 +1,34 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_equals_db;
--enable_warnings
CREATE DATABASE json_equals_db;
USE json_equals_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_EQUALS function.
--echo # ----------------------------------------------------------------------
--echo # Return 1
CREATE TABLE t1(l LONGTEXT, r LONGTEXT) ENGINE = columnstore;
INSERT INTO t1 VALUES('{"a":1,"b":2}','{"a":1,"b":2}');
INSERT INTO t1 VALUES('{"a":1,"b":2}','{"b":2,"a":1}');
INSERT INTO t1 VALUES('{"a":1,"b":2}','{"a": 1,"b": 2}');
INSERT INTO t1 VALUES('{"a": 1,"b":2}','{"b":2,"a":1}');
INSERT INTO t1 VALUES('[1,2]','[1,2]');
INSERT INTO t1 VALUES('[1,2]','[1 , 2]');
INSERT INTO t1 VALUES(1,1);
SELECT JSON_EQUALS(l, r) FROM t1;
--echo # Return 0
TRUNCATE t1;
INSERT INTO t1 VALUES('{"a":1,"b":3}','{"a":1,"b":2}');
INSERT INTO t1 VALUES('[1,2]','[2,1]');
INSERT INTO t1 VALUES(1,2);
SELECT JSON_EQUALS(l, r) FROM t1;
--echo # NULL
TRUNCATE t1;
INSERT INTO t1 VALUES('["a",true,{"e":false},null','["a",true,{"e":false},null');
INSERT INTO t1 VALUES('s1',"s1");
SELECT JSON_EQUALS(l, r) FROM t1;
DROP TABLE t1;
DROP DATABASE json_equals_db;

View File

@ -0,0 +1,37 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_exists_db;
--enable_warnings
CREATE DATABASE json_exists_db;
USE json_exists_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_EXISTS function.
--echo # ----------------------------------------------------------------------
--echo # Test case 0
CREATE TABLE t1(j TEXT, p TEXT) ENGINE = columnstore;
SET
@json = '{"key1":"xxxx", "key2":[1, 2, 3]}';
INSERT INTO
t1
VALUES
(@json, '$.key1'),
(@json, '$.key1[0]'),
(@json, '$.key2'),
(@json, '$.key2[1]'),
(@json, '$.key2[10]');
SELECT
j,
p,
JSON_EXISTS(j, p) AS result
FROM
t1;
DROP TABLE t1;
DROP DATABASE json_exists_db;

View File

@ -0,0 +1,81 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_extract_db;
--enable_warnings
CREATE DATABASE json_extract_db;
USE json_extract_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_EXTRACT function.
--echo # ----------------------------------------------------------------------
--echo
--echo # Single path expression
--echo
CREATE TABLE t1(j LONGTEXT, p LONGTEXT) ENGINE = COLUMNSTORE;
SET
@json = '[1, "val2", [3.1, -4]]';
INSERT INTO
t1
VALUES
(@json, '$[0]'),
(@json, '$[1]'),
(@json, '$[2]'),
(@json, '$[3]'),
(@json, '$[2][0]'),
(@json, '$[2][1]'),
(@json, '$[2][10]'),
(@json, '$'),
('1', '$'),
('[10, 20, [30, 40], 1, 10]', '$[1]'),
('{"key1":"asd", "key2":[2,3]}', "$.key1"),
('{"key0":true, "key1":"qwe"}', "$.key1"),
('[10, 20, [30, 40]]', '$[2][*]'),
('[10, 20, [{"a":3}, 30, 40]]', '$[2][*]'),
(json_object('foo', 'foobar'), '$');
SELECT
j,
p,
JSON_EXTRACT(j, p) AS result
FROM
t1;
--echo
--echo # Multiple path expression
--echo
CREATE TABLE t2(j LONGTEXT, p1 LONGTEXT, p2 LONGTEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t2
VALUES
(
'{"key1":"asd", "key2":[2,3]}',
"$.keyX",
"$.keyY"
),
(
'{"key1":"asd", "key2":[2,3]}',
"$.key1",
"$.key2"
),
('{"key1":5, "key2":[2,3]}', "$.key1", "$.key2"),
('[10, 20, [30, 40], 1, 10]', '$[1]', '$[25]'),
('[{"a": [3, 4]}, {"b": 2}]', '$[0].a', '$[1].a');
SELECT
j,
p1,
p2,
JSON_EXTRACT(j, p1, p2) AS result
FROM
t2;
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_extract_db;

View File

@ -0,0 +1,56 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_insert_de;
--enable_warnings
CREATE DATABASE json_insert_de;
USE json_insert_de;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_INSERT|REPLACE|SET function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(j TEXT, p TEXT, v TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
(
'{"a":1, "b":{"c":1}, "d":[1, 2]}',
'$.b.k1',
'word'
),
('{"a":1, "b":{"c":1}, "d":[1, 2]}', '$.d[3]', 3),
('{"a":1, "b":{"c":1}, "d":[1, 2]}', '$.a[2]', 2),
(
'{"a":1, "b":{"c":1}, "d":[1, 2]}',
'$.b.c',
'word'
),
('1', '$[0]', 4),
('[]', '$[0][0]', 100),
('1', '$[0][0]', 100),
(
'{ "a": 1, "b": [2, 3]}',
'$.a',
10
),
(
'{ "a": 1, "b": [2, 3]}',
'$.b',
'[true, false]'
);
SELECT
j AS json,
p AS path,
v AS value,
JSON_INSERT(j, p, v) AS json_insert,
JSON_REPLACE(j, p, v) AS json_replace,
JSON_SET(j, p, v) AS json_set
FROM
t1;
DROP TABLE t1;
DROP DATABASE json_insert_de;

View File

@ -0,0 +1,21 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_keys_db;
--enable_warnings
CREATE DATABASE json_keys_db;
USE json_keys_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_KEYS function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(l TEXT) ENGINE = columnstore;
INSERT INTO t1 VALUES('{"key1":"value1", "key2":{"key3": "value2"}}');
INSERT INTO t1 VALUES('{"key1":"value1", "key2":{"key3": "value3", "key4":"value4"}}');
INSERT INTO t1 VALUES('{"key1":"value1" "key2":{"key3": "value3", "key4":"value4"}}');
SELECT JSON_KEYS(l) from t1;
SELECT JSON_KEYS(l, '$.key2') from t1;
SELECT JSON_KEYS(l, '$.key1') from t1;
SELECT JSON_KEYS(l, '$.key123') from t1;
DROP TABLE t1;
DROP DATABASE json_keys_db;

View File

@ -0,0 +1,81 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_length_db;
--enable_warnings
CREATE DATABASE json_length_db;
USE json_length_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_LENGTH function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(l LONGTEXT) ENGINE = columnstore;
--echo # Invalid json text
INSERT INTO t1 VALUES(null);
INSERT INTO t1 VALUES('1');
INSERT INTO t1 VALUES('abc');
INSERT INTO t1 VALUES('"abc"');
INSERT INTO t1 VALUES('true');
INSERT INTO t1 VALUES('false');
INSERT INTO t1 VALUES('null');
--echo error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_LENGTH(l) FROM t1;
--echo # Valid json text
TRUNCATE t1;
INSERT INTO t1 VALUES('{}');
INSERT INTO t1 VALUES('{ "a" : 100, "b" : 200 }');
INSERT INTO t1 VALUES('{ "a" : 100, "b" : [ 300, 400, 500 ] }');
INSERT INTO t1 VALUES('[]');
INSERT INTO t1 VALUES('[ null, "foo", true, 1.1 ]');
INSERT INTO t1 VALUES('[ null, "foo", true, { "a" : "b", "c" : "d" } ]');
INSERT INTO t1 VALUES('"foo"');
INSERT INTO t1 VALUES('1.2');
SELECT JSON_LENGTH(l) FROM t1;
--echo # Bad path expressions
TRUNCATE t1;
INSERT INTO t1 VALUES('true');
--echo error ER_INVALID_JSON_PATH
SELECT JSON_LENGTH(l, 'c$') FROM t1;
TRUNCATE t1;
INSERT INTO t1 VALUES('{ "foo" : [ true, false ] }');
--echo error ER_INVALID_JSON_PATH
SELECT JSON_LENGTH(l, '$.foo[bar]') FROM t1;
--echo # wildcards not allowed in path expressions for this function
TRUNCATE t1;
INSERT INTO t1 VALUES('true');
--echo error ER_INVALID_JSON_PATH_WILDCARD
SELECT JSON_LENGTH(l, '$.*') FROM t1;
TRUNCATE t1;
INSERT INTO t1 VALUES('true');
--echo error ER_INVALID_JSON_PATH_WILDCARD
SELECT JSON_LENGTH(l, '$.foo**.bar') FROM t1;
--echo # Error ER_INVALID_JSON_TEXT_IN_PARAM
TRUNCATE t1;
INSERT INTO t1 VALUES('abc');
INSERT INTO t1 VALUES('[ "a", true, "b" , { "e" : false }, "c" , null');
SELECT JSON_LENGTH(l) FROM t1;
--echo # Path exist
TRUNCATE t1;
INSERT INTO t1 VALUES('[ 1, [ 2, 3, 4 ], 5 ]');
SELECT JSON_LENGTH(l, '$[0]') FROM t1;
SELECT JSON_LENGTH(l, '$[1]') FROM t1;
SELECT JSON_LENGTH(l, '$[2]') FROM t1;
SELECT JSON_LENGTH(l, '$[2][0]') FROM t1;
TRUNCATE t1;
INSERT INTO t1 VALUES('[ 1, [ 2, 3, 4 ], {"a": 1} ]');
SELECT JSON_LENGTH(l, '$[2][0]') FROM t1;
TRUNCATE t1;
INSERT INTO t1 VALUES('[ 1, { "a": true, "b": false, "c": null }, 5 ]');
SELECT JSON_LENGTH(l, '$[1]') FROM t1;
TRUNCATE t1;
INSERT INTO t1 VALUES('{ "a" : 123, "b" : [ 123, 456, 789 ] }');
SELECT JSON_LENGTH(l) FROM t1;
SELECT JSON_LENGTH(l, '$.b') FROM t1;
SELECT JSON_LENGTH(l, '$.c') FROM t1;
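# JSON_LENGTH returns 1 for a scalar, the number of members for an object and
# the number of elements for an array (nested content is not counted); with a
# path argument it measures the selected subdocument, as the '$.b' case above
# shows. A hedged sketch, not part of the recorded result:
# JSON_LENGTH('[ 1, [ 2, 3, 4 ], 5 ]', '$[1]')  -- expected 3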
--echo # Non-existent path
TRUNCATE t1;
INSERT INTO t1 VALUES('[ 1, [ 2, 3, 4 ], 5 ]');
SELECT JSON_LENGTH(l, '$[2][1]') FROM t1;
DROP TABLE t1;
DROP DATABASE json_length_db;

View File

@ -0,0 +1,79 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_merge_db;
--enable_warnings
CREATE DATABASE json_merge_db;
USE json_merge_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_MERGE function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(l TEXT, r TEXT) ENGINE = columnstore;
INSERT INTO t1 VALUES('[1, 2, 3]','[4, 5, 6]');
--echo # Not enough args
--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT JSON_MERGE() FROM t1;
--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT JSON_MERGE(l) FROM t1;
--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT JSON_MERGE(NULL) FROM t1;
--echo # Null args
SELECT JSON_MERGE(NULL, r) FROM t1;
SELECT JSON_MERGE(l, NULL) FROM t1;
SELECT JSON_MERGE(NULL, l, r) FROM t1;
SELECT JSON_MERGE(l, NULL, r) FROM t1;
SELECT JSON_MERGE(l, r, NULL) FROM t1;
--echo # Invalid JSON text
--echo error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_MERGE(l, '[4, 5, 6') FROM t1;
--echo error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_MERGE('[1, 2, 3', r) FROM t1;
--echo # Good JSON_MERGE() Expressions
TRUNCATE t1;
INSERT INTO t1 VALUES('1', '2' );
INSERT INTO t1 VALUES('1', '[2, 3]' );
INSERT INTO t1 VALUES('[1, 2]', '3' );
INSERT INTO t1 VALUES('1', '{ "a": 2 }' );
INSERT INTO t1 VALUES('{ "a": 2 }', '1' );
INSERT INTO t1 VALUES('[1, 2]', '[3, 4]' );
INSERT INTO t1 VALUES('{ "a": 2 }', '{ "b": 3}' );
INSERT INTO t1 VALUES('[1, 2]', '{ "a": 2 }' );
INSERT INTO t1 VALUES('{ "a": 2 }', '[1, 2]' );
INSERT INTO t1 VALUES('{"a": 1, "b": 2 }', '{"b": 3, "d": 4 }' );
INSERT INTO t1 VALUES('{"a": 1, "b": 2 }', '{"b": [3, 4], "d": 4 }' );
INSERT INTO t1 VALUES('{"a": 1, "b": [2, 3] }', '{"b": 4, "d": 4 }' );
INSERT INTO t1 VALUES('{"a": 1, "b": 2 }', '{"b": {"e": 7, "f": 8}, "d": 4 }' );
INSERT INTO t1 VALUES('{"b": {"e": 7, "f": 8}, "d": 4 }', '{"a": 1, "b": 2 }' );
INSERT INTO t1 VALUES('{"a": 1, "b": [2, 9] }', '{"b": [10, 11], "d": 4 }' );
INSERT INTO t1 VALUES('{"a": 1, "b": [2, 9] }', '{"b": {"e": 7, "f": 8}, "d": 4 }' );
INSERT INTO t1 VALUES('{"b": {"e": 7, "f": 8}, "d": 4 }', '{"a": 1, "b": [2, 9] }' );
INSERT INTO t1 VALUES('{"b": {"e": 7, "f": 8}, "d": 4 }', '{ "a": 1, "b": {"e": 20, "g": 21 } }' );
INSERT INTO t1 VALUES('{ "a" : "foo", "b" : [ true, { "c" : 123 } ] }', '[ 5, 6]');
INSERT INTO t1 VALUES('{ "a" : "foo", "b" : [ true, { "c" : 123 } ] }', '{ "b": [ false, 34 ] }');
INSERT INTO t1 VALUES('{ "a" : "foo", "b" : [ true, { "c" : 123 } ] }','{ "b": "bar" }');
INSERT INTO t1 VALUES('{ "a" : { "b" : 1 } }','{ "a" : { "c" : 1 } }');
SELECT JSON_MERGE(l, r) FROM t1;
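# JSON_MERGE (a synonym of JSON_MERGE_PRESERVE) concatenates arrays, autowraps
# scalars into arrays and merges objects recursively, keeping both values for a
# duplicated key. A hedged sketch, not part of the recorded result:
# JSON_MERGE('{"a": 1}', '{"a": 2}')  -- expected {"a": [1, 2]}
# JSON_MERGE('[1, 2]', '3')           -- expected [1, 2, 3]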
CREATE TABLE t2(l1 TEXT, l2 TEXT, l3 TEXT) ENGINE = columnstore;
INSERT INTO t2 VALUES('1', '2', '3' );
INSERT INTO t2 VALUES('[1, 2 ]', '3', '[4, 5]' );
INSERT INTO t2 VALUES
(
'{ "a": true, "b": { "c": 3, "d": 4 }, "e": [ 1, 2 ] }',
'{ "d": false, "b": { "g": 3, "d": 5 }, "f": [ 1, 2 ] }',
'{ "m": true, "b": { "h": 8, "d": 4 }, "e": [ 3, 4 ] }'
);
SELECT JSON_MERGE(l1, l2, l3) from t2;
DROP TABLE t1;
DROP TABLE t2;
DROP DATABASE json_merge_db;

View File

@ -0,0 +1,54 @@
DROP DATABASE IF EXISTS json_merge_patch_db;
CREATE DATABASE json_merge_patch_db;
USE json_merge_patch_db;
# ----------------------------------------------------------------------
# Test of JSON_MERGE_PATCH function.
# ----------------------------------------------------------------------
CREATE TABLE t1(l1 TEXT, l2 TEXT) ENGINE = columnstore;
INSERT INTO t1(l1, l2) VALUES
('{"a":"b"}', '{"a":"c"}'),
('{"a":"b"}', '{"b":"c"}'),
('{"a":"b"}', '{"a":null}'),
('{"a":"b", "b":"c"}', '{"a":null}'),
('{"a":["b"]}', '{"a":"c"}'),
('{"a":"c"}', '{"a":["b"]}'),
('{"a": {"b":"c"}}', '{"a": {"b":"d", "c":null}}'),
('{"a":[{"b":"c"}]}', '{"a": [1]}'),
('["a","b"]', '["c","d"]'),
('{"a":"b"}', '["c"]'),
('{"a":"foo"}', 'null'),
('{"a":"foo"}', '"bar"'),
('{"e":null}', '{"a":1}'),
('[1,2]', '{"a":"b", "c":null}'),
('{}', '{"a":{"bb":{"ccc":null}}}'),
(NULL, '{}'),
('{}', NULL);
SELECT l1, l2,
JSON_MERGE_PATCH(l1, l2) AS `l1 + l2`
FROM t1;
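# JSON_MERGE_PATCH follows RFC 7396 semantics: a matching member in the patch
# replaces the original, a null member deletes it, and a non-object patch
# replaces the whole document. A hedged sketch, not part of the recorded result:
# JSON_MERGE_PATCH('{"a":"b", "c":1}', '{"a":null}')  -- expected {"c": 1}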
DROP TABLE t1;
CREATE TABLE t2(l1 TEXT, l2 TEXT, l3 TEXT) ENGINE = columnstore;
INSERT INTO t2 VALUES
('{"a":"b"}', NULL, '{"c":"d"}'),
(NULL, '[1,2,3]', '[4,5,6]'),
(NULL, 'a', 'b'),
('{"a":"b"}', '[1,2,3]', '{"c":null,"d":"e"}');
SELECT l1, l2, l3,
JSON_MERGE_PATCH(l1, l2, l3) AS merged
FROM t2;
DROP TABLE t2;
CREATE TABLE t3(l1 TEXT, l2 TEXT) ENGINE = columnstore;
--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT JSON_MERGE_PATCH() FROM t3;
INSERT INTO t3(l1, l2) VALUES('{}', '{"a":"c"}');
--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT l1, JSON_MERGE_PATCH(l1) AS merged FROM t3;
INSERT INTO t3(l1, l2) VALUES
('{', '[1,2,3]'),
('{"a":"b"}', '[1,');
SELECT l1, l2, JSON_MERGE_PATCH(l1, l2) AS merged FROM t3;
DROP TABLE t3;
DROP DATABASE json_merge_patch_db;

View File

@ -0,0 +1,27 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_normalize_db;
--enable_warnings
CREATE DATABASE json_normalize_db;
USE json_normalize_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_NORMALIZE function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(l LONGTEXT) ENGINE = columnstore;
INSERT INTO t1 VALUES('{"key2":"v2","key1":"v1"}');
INSERT INTO t1 VALUES('{"key2": "v2", "key1":"v1"}');
INSERT INTO t1 VALUES('{"key1": "v2", "key1":"v1"}');
INSERT INTO t1 VALUES('{"key1": "v2", "key2":"v1"}');
INSERT INTO t1 VALUES('{}');
INSERT INTO t1 VALUES('{ }');
INSERT INTO t1 VALUES('"123"');
INSERT INTO t1 VALUES('[ 1,2,3]');
INSERT INTO t1 VALUES('[]');
INSERT INTO t1 VALUES('[ ]');
INSERT INTO t1 VALUES(null);
INSERT INTO t1 VALUES('{"key1":value1}');
SELECT JSON_NORMALIZE(l) from t1;
DROP TABLE t1;
DROP DATABASE json_normalize_db;

View File

@ -0,0 +1,47 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_object_db;
--enable_warnings
CREATE DATABASE json_object_db;
USE json_object_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_OBJECT function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(l TEXT) ENGINE = COLUMNSTORE;
INSERT INTO t1 VALUES('a');
--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT json_object(l) FROM t1;
--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT json_object(l, 1, 'b') FROM t1;
--echo # Null arg
TRUNCATE t1;
INSERT INTO t1 values(null);
SELECT JSON_OBJECT(l, 1) FROM t1;
SELECT JSON_OBJECT(1, l) FROM t1;
--echo # Valid arg
TRUNCATE t1;
INSERT INTO t1 values('a');
SELECT JSON_OBJECT(l, null) FROM t1;
SELECT JSON_OBJECT(l, 1) FROM t1;
SELECT JSON_OBJECT(l, 1, 'b', 'foo') FROM t1;
SELECT JSON_OBJECT(l, 1, 'b', 'foo','c','{ "d": "e" }') FROM t1;
SELECT JSON_OBJECT(l, true, 'b', false, 'c', null ) FROM t1;
SELECT JSON_OBJECT(l, 'true', 'b', 'false', 'c', null ) FROM t1;
SELECT JSON_VALID(json_object(l, 1 )) from t1;
--echo # Long key
TRUNCATE t1;
INSERT INTO t1 values('a');
--echo # SELECT JSON_OBJECT(REPEAT(l, 64 * 1024), 1) FROM t1;
--echo # Non-string keys are cast to CHAR
TRUNCATE t1;
INSERT INTO t1 values('a');
SELECT JSON_OBJECT(1, l) FROM t1;
SELECT JSON_OBJECT(CAST(1 AS CHAR), l) FROM t1;
SELECT JSON_OBJECT(true, l) FROM t1;
SELECT JSON_OBJECT(CAST(true AS CHAR), l) FROM t1;
SELECT JSON_OBJECT(false, l) FROM t1;
SELECT JSON_OBJECT(CAST(false AS CHAR), l) FROM t1;
DROP TABLE t1;
DROP DATABASE json_object_db;

View File

@ -0,0 +1,182 @@
--source ../include/disable_before_10.9.inc
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_overlaps_db;
--enable_warnings
CREATE DATABASE json_overlaps_db;
USE json_overlaps_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_OVERLAPS function.
--echo # ----------------------------------------------------------------------
--echo # Comparing scalar
CREATE TABLE t1(l1 LONGTEXT, l2 LONGTEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
('true', 'true'),
('false', 'false'),
('1', '1'),
('"string1"', '"string1"'),
('null', 'null'),
('true', 'false'),
('1', '"1"'),
('1', '0'),
('null', '0'),
('"string1"', '"string2"'),
('true', '["abc", 1, 2, true, false]'),
('true', '["abc", 1, 2, [true]]'),
('true', '{"A":true}');
SELECT
l1,
l2,
JSON_OVERLAPS(l1, l2) AS is_overlaps
FROM
t1;
--echo # Testing non-scalar json data types
--echo # Comparing object with object (non-nested)
CREATE TABLE t2(l1 LONGTEXT, l2 LONGTEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t2
VALUES
('{"A":[1, 2, 3]}', '{}'),
('{"A": 1}', '{"A": 1}'),
('{"A": 1}', '{"B": 1}'),
(
'{"A": 1, "B": "string1"}',
'{"A": 2,"B": "string1"}'
),
(
'{"A": 1,"B": "string1"}',
'{"A": 2,"B": "string2"}'
),
(
'{"A": 1,"B": {"C":2}}',
'{"A": 2,"B": {"C":1}}'
),
(
'{"A": 1,"B": {"C":2}}',
'{"A": 2,"B": {"C":2}}'
),
(
'{"A": {"B": true}}',
'{"A": {"B": true,"C": false}}'
),
(
'{"A":1, "B":{"D":4, "E":5}}',
'{"C":3, "B":{"E":5, "D":4}}'
),
(
'{"A":1, "B":{"D":4, "E":[5, 6, 7]}}',
'{"C":3, "B":{"E":5, "D":4}}'
),
(
'{"A":1, "B":{"D":4, "E":[5, 6, 7]}}',
'{"C":3, "B":{"E":[5, 6, 7], "D":4}}'
),
(
'{"A":1, "B":{"D":4, "E":[5, 6, 7]}}',
'{"C":3, "B":{"E":[7, 6 ,5], "D":4}}'
),
(
'{"A":1, "B":{"D":4, "E":[5, 6, 7]}}',
'{"C":3, "F":{"E":[5, 6, 7], "D":4}}'
),
('[1, 2, true, false, null]', '[3, 4, 1]'),
('[1, 2, true, false, null]', '[3, 4, 5]'),
('[1,2,3]', '[]'),
('[1, 2, true, false, null]', '[3, 4, [1]]'),
(
'[1, 2, [true, false], null]',
'[[1], [true, false]]'
),
('[1, 2, 3, [4, 5, 6]]', '[7, 8, 9, [6, 5, 4]]'),
('[1, 2, true, false, null]', '{"A": 1}'),
(
'[1, 2, true, false, null, {"A":2}]',
'{"A": 1}'
),
('[1, {"A": 2}, {"A": 1}]', '{"A": 1}'),
(
'[1, 2, true, false, {"A": 1, "B": 2}]',
'{"A": 1, "B": 2}'
),
(
'[1, 2, true, false, {"A": 1, "B": 2}]',
'{"A": 1, "B": 3}'
),
(
'[1, 2, true, false, [{"A": 1, "B": 2}]]',
'{"A": 1, "B": 2}'
),
(
'[1, 2, true, false, [{"A": 1, "B": 2}]]',
'{"A": 1, "B": 3}'
),
(
'[1, 2, true, false, [{"A": 1, "B": 2}]]',
'{"A": 1}'
),
(
'[1, 2, true, false, {"A": 1, "B": {"C": 12}}]',
'{"A": 1, "B": {"C": 12}}'
),
(
'[1, 2, true, false, [{"A": 1, "B": {"C": 12}}]]',
'{"A": 1, "B": {"C": 12}}'
),
(
'[1, 2, true, false, [{"A": 1, "B": {"C": 12}}]]',
'{"A": 1, "B":{"C": 12}}'
),
(
'[[1, 2, true, false, {"A": 1, "B": {"C": 12}}]]',
'{"A": 1, "B": {"C": 12}}'
),
(
'{"A": 1, "B": 3}',
'[1, 2, true, false, {"A": 1, "B": 2}]'
),
(
'{"A": 1, "B": 3}',
'[1, 2, true, false, {"A": 1, "B": 3}]'
),
(
'{"A": 1, "B": 3}',
'[1, 2, true, false, {"A": 1, "B": 2}, {"A": 1, "B": 3}]'
),
(
'{"A": 1, "B": [1, 2, 3]}',
'[1, 2, true, false, {"A": 1, "B": 2}, {"A": 1, "B": [1, 2, 3]}]'
),
(
'{"A": 1, "B": [1, 2, {"C": 3, "D": 5}]}',
'[1, 2, true, false, {"A": 1, "B": 2}, {"A":1, "B":[1, 2, {"C": 3, "D": 5}]}]'
),
(
'{"A": 1, "B": [1, 2, {"C": 3, "D": 5}]}',
'[1, 2, true, false, {"A": 1, "B": 2},{"A": 1, "B": [1, 2, {"C": 3, "D": 4}]}]'
),
(
'{"A": 1, "B": 3}',
'[1, 2, true, false, [{"A": 1, "B": 2}, {"A": 1, "B": 3}]]'
);
SELECT
l1,
l2,
JSON_OVERLAPS(l1, l2) AS is_overlaps
FROM
t2;
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_overlaps_db;

View File

@ -0,0 +1,181 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_quote_db;
--enable_warnings
CREATE DATABASE json_quote_db;
USE json_quote_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_QUOTE, JSON_UNQUOTE function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(l LONGTEXT) ENGINE = columnstore;
INSERT INTO
t1
VALUES
(NULL);
--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT
JSON_QUOTE(l, NULL)
FROM
t1;
--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT
JSON_QUOTE(l, 'bar')
FROM
t1;
--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT
JSON_UNQUOTE(l, NULL)
FROM
t1;
--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT
JSON_UNQUOTE(l, 'bar')
FROM
t1;
--echo # Null arg
SELECT
JSON_QUOTE(l)
FROM
t1;
SELECT
JSON_UNQUOTE(l)
FROM
t1;
--echo # Calling based on encodings
TRUNCATE t1;
INSERT INTO
t1
VALUES
('abc');
SELECT
l AS raw,
JSON_QUOTE(CONVERT(l USING ascii)) AS quote_ascii,
JSON_QUOTE(CONVERT(l USING latin1)) AS quote_latin1,
JSON_QUOTE(CONVERT(l USING utf8)) AS quote_utf8,
JSON_QUOTE(CONVERT(l USING utf8mb4)) AS quote_utf8mb4
FROM
t1;
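# JSON_QUOTE wraps a string in double quotes and escapes embedded quotes,
# backslashes and control characters; JSON_UNQUOTE reverses it. A hedged sketch
# (assuming the default SQL mode), not part of the recorded result:
# JSON_UNQUOTE(JSON_QUOTE('ab"c'))  -- expected ab"c, JSON_QUOTE('ab"c') itself giving "ab\"c"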
--echo # Chinese characters (normal in console, abnormal in test)
CREATE TABLE t2(l VARCHAR(50)) ENGINE = columnstore;
INSERT INTO
t2
VALUES
(X'e68891');
SELECT
*
FROM
t2;
SET
NAMES 'utf8';
-- echo # All should be the Chinese "I" i.e. 我
SELECT
JSON_QUOTE(CONVERT(l USING utf8)) AS quote_utf8,
JSON_COMPACT(JSON_QUOTE(CONVERT(l USING utf8))) AS compact_quote_utf8,
JSON_QUOTE(CONVERT(l USING utf8mb4)) AS quote_utf8mb4,
JSON_UNQUOTE(CONVERT(l USING utf8)) AS unquote_utf8
FROM
t2;
--echo # Do nothing
TRUNCATE t1;
INSERT INTO
t1
VALUES
('"');
SELECT
JSON_QUOTE(l),
JSON_UNQUOTE(l)
FROM
t1;
TRUNCATE t1;
INSERT INTO
t1
VALUES
('""');
SELECT
JSON_UNQUOTE(l),
CHAR_LENGTH(JSON_UNQUOTE(l))
FROM
t1;
TRUNCATE t1;
INSERT INTO
t1
VALUES
('"" ');
SELECT
JSON_UNQUOTE(l)
FROM
t1;
--echo # Incorrect type e.g. Integer
CREATE TABLE t3(i INT) ENGINE = columnstore;
INSERT INTO
t3
VALUES
(123);
SELECT
JSON_QUOTE(i)
FROM
t3;
SELECT
JSON_UNQUOTE(i)
FROM
t3;
--echo # Round trip
TRUNCATE t1;
INSERT INTO
t1
VALUES
('abc');
SELECT
JSON_UNQUOTE(JSON_COMPACT(JSON_QUOTE(l)))
FROM
t1;
SELECT
JSON_UNQUOTE(
JSON_UNQUOTE(
JSON_UNQUOTE(JSON_QUOTE(JSON_QUOTE(JSON_QUOTE(l))))
)
)
FROM
t1;
DROP TABLE t1;
--echo # DROP TABLE t2;
DROP TABLE t3;
DROP DATABASE json_quote_db;

View File

@ -0,0 +1,52 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_remove_db;
--enable_warnings
CREATE DATABASE json_remove_db;
USE json_remove_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_REMOVE function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(j TEXT, p TEXT) ENGINE = columnstore;
INSERT INTO
t1
VALUES
('["a", ["b", "c"], "d"]', '$[0]'),
('["a", ["b", "c"], "d"]', '$[1]'),
('["a", ["b", "c"], "d"]', '$[1][0]'),
('["a", ["b", "c"], "d"]', '$[0]');
SELECT
j AS json,
p AS path,
JSON_REMOVE(j, p) AS result
FROM
t1;
CREATE TABLE t2(j TEXT, p TEXT) ENGINE = columnstore;
INSERT INTO
t2
VALUES
('{"a": 1, "b": [2, 3]}', '$.a'),
('{"a": 1, "b": [2, 3]}', '$.a[0]'),
('{"a": 1, "b": [2, 3]}', '$.b'),
('{"a": 1, "b": [2, 3]}', '$.b[0]'),
('{"a": 1, "b": [2, 3]}', '$.b[1]');
SELECT
j AS json,
p AS path,
JSON_REMOVE(j, p) AS result
FROM
t2;
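# Removing an array element shifts the following elements left; removing an
# object member drops the key. A hedged sketch, not part of the recorded result:
# JSON_REMOVE('["a", ["b", "c"], "d"]', '$[1]')  -- expected ["a", "d"]
# JSON_REMOVE('{"a": 1, "b": [2, 3]}', '$.a')    -- expected {"b": [2, 3]}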
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_remove_db;

View File

@ -0,0 +1,102 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_search_db;
--enable_warnings
CREATE DATABASE json_search_db;
USE json_search_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_SEARCH function.
--echo # ----------------------------------------------------------------------
--echo
--echo # JSON_SEARCH with single path expression
--echo
CREATE TABLE t1(j TEXT, f TEXT, v TEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
(
'["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}]',
'one',
'abc'
),
(
'["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}]',
'all',
'abc'
),
('{"x": "\\""}', "one", '"'),
('{"x": "\\""}', "one", '\\"');
SELECT
j AS json,
f AS return_arg,
v AS search_str,
JSON_SEARCH(j, f, v) AS result
FROM
t1;
--echo
--echo # JSON_SEARCH with path expression
--echo
CREATE TABLE t2(j TEXT, f TEXT, v TEXT, e TEXT, p TEXT) ENGINE = COLUMNSTORE;
SET
@j = '["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}]';
INSERT INTO
t2
VALUES
(@j, 'all', 'abc', NULL, '$[0]'),
(@j, 'all', 'abc', NULL, '$[2]'),
(@j, 'all', '10', NULL, '$[1]'),
(@j, 'all', '10', NULL, '$[2]');
SELECT
j AS json,
f AS return_arg,
v AS search_str,
p AS path,
JSON_SEARCH(j, f, v, NULL, p) AS result
FROM
t2;
--echo
--echo # JSON_SEARCH with escape char
--echo
CREATE TABLE t3(j TEXT, f TEXT) ENGINE = COLUMNSTORE;
set @json = '[ "footbar", "foo%bar" ]';
INSERT INTO t3 VALUES(@json,'all');
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo%bar' ) FROM t3;
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo\%bar' ) FROM t3;
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo|%bar', '|' ) FROM t3;
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo|%bar', '|', '$[0]' ) FROM t3;
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo|%bar', '|', '$[0]', '$[1]' ) FROM t3;
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo|%bar', '|', '$[0]', '$[1]', '$[2]' ) FROM t3;
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo\%bar', null ) FROM t3;
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo\%bar', null, '$[0]' ) FROM t3;
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo\%bar', null, '$[1]' ) FROM t3;
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo|%bar', '|', '$[0]' ) FROM t3;
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'foo|%bar', '|', '$[1]' ) FROM t3;
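# The fourth argument changes the escape character used in the LIKE-style
# search string (backslash by default), so 'foo|%bar' with escape '|' matches
# only the literal string foo%bar. A hedged sketch, not part of the recorded result:
# JSON_SEARCH('["foobar", "foo%bar"]', 'one', 'foo|%bar', '|')  -- expected "$[1]"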
--echo
--echo # JSON_SEARCH is case-sensitive
--echo
CREATE TABLE t4(j TEXT, f TEXT) ENGINE = COLUMNSTORE;
INSERT INTO t4 VALUES('["abc", "ABC"]', 'all');
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'aBC') FROM t4;
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'abc') FROM t4;
SELECT j AS json, f AS return_arg, JSON_SEARCH(j, f, 'ABC') FROM t4;
drop TABLE t4;
drop TABLE t3;
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_search_db;

View File

@ -0,0 +1,51 @@
--source ../include/disable_before_10.9.inc
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_type_db;
--enable_warnings
CREATE DATABASE json_type_db;
USE json_type_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_TYPE function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(l LONGTEXT) ENGINE = COLUMNSTORE;
--echo # Error ER_INVALID_JSON_TEXT_IN_PARAM
INSERT INTO
t1
VALUES
('abc');
SELECT
JSON_TYPE(l)
FROM
t1;
--echo # String literal - valid JSON
TRUNCATE t1;
INSERT INTO
t1
VALUES
('{"a": 2}'),
('[1,2]'),
('"scalar string"'),
('true'),
('false'),
('null'),
('1'),
('-0'),
('-0.0');
SELECT
l AS json,
JSON_TYPE(l) AS TYPE
FROM
t1;
DROP TABLE t1;
DROP DATABASE json_type_db;

View File

@ -0,0 +1,195 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_valid_db;
--enable_warnings
CREATE DATABASE json_valid_db;
USE json_valid_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_VALID function.
--echo # ----------------------------------------------------------------------
--echo #
--echo # String literal - valid JSON
--echo #
CREATE TABLE t1(l LONGTEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t1
VALUES
('123'),
('-123'),
('5000000000'),
('-5000000000'),
('1.23'),
('"123"'),
('true'),
('false'),
('null'),
('{"address": "Trondheim"}'),
(JSON_OBJECT()),
(JSON_OBJECT(1, 2)),
(JSON_ARRAY()),
(JSON_ARRAY(1, 2));
SELECT
l AS raw,
JSON_VALID(l) AS is_valid,
JSON_VALID(JSON_COMPACT(l)) AS compact
FROM
t1;
--echo #
--echo # String literal - invalid JSON
--echo #
TRUNCATE t1;
INSERT INTO
t1
VALUES
('12 3'),
('{key:value}'),
('{key:value'),
('[1,2,]'),
('[1,2');
SELECT
l AS raw,
JSON_VALID(l) AS is_valid,
JSON_VALID(JSON_COMPACT(l)) AS compact
FROM
t1;
--echo #
--echo # String literal - not in UTF-8
--echo #
TRUNCATE t1;
SET
NAMES 'ascii';
INSERT INTO
t1
VALUES
('123');
SELECT
l AS raw,
JSON_VALID(l) AS is_valid,
JSON_VALID(JSON_COMPACT(l)) AS compact
FROM
t1;
SET
NAMES 'utf8';
--echo #
--echo # Bare NULL
--echo #
TRUNCATE t1;
INSERT INTO
t1
VALUES
(NULL);
SELECT
JSON_VALID(l)
FROM
t1;
--echo #
--echo # Function result - string
--echo #
TRUNCATE t1;
INSERT INTO
t1
VALUES
(UPPER('"abc"'));
SELECT
JSON_VALID(l)
FROM
t1;
--echo #
--echo # Function result - string not in UTF-8
--echo #
TRUNCATE t1;
SET
NAMES 'latin1';
INSERT INTO
t1
VALUES
(UPPER('"abc"'));
SELECT
JSON_VALID(l)
FROM
t1;
SET
NAMES 'utf8';
--echo #
--echo # Function result - date, not valid as JSON without CAST
--echo #
TRUNCATE t1;
INSERT INTO
t1
VALUES
(CAST('2015-01-15' AS DATE));
SELECT
JSON_VALID(l)
FROM
t1;
--echo #
--echo # The date string doesn't parse as JSON text, so it is not valid:
--echo #
TRUNCATE t1;
INSERT INTO
t1
VALUES
(
CAST(
CAST('2015-01-15' AS DATE) AS CHAR CHARACTER SET 'utf8'
)
);
SELECT
JSON_VALID(l)
FROM
t1;
--echo #
--echo # Function result - NULL
--echo #
TRUNCATE t1;
INSERT INTO
t1
VALUES
(UPPER(NULL));
INSERT INTO
t1
VALUES
(UPPER(CAST(NULL AS CHAR)));
SELECT
JSON_VALID(l)
FROM
t1;
DROP TABLE t1;
DROP DATABASE json_valid_db;

View File

@ -0,0 +1,43 @@
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_value_db;
--enable_warnings
CREATE DATABASE json_value_db;
USE json_value_db;
--echo # ----------------------------------------------------------------------
--echo # Test of JSON_VALUE function.
--echo # ----------------------------------------------------------------------
CREATE TABLE t1(s TEXT, p TEXT) ENGINE = columnstore;
INSERT INTO t1 VALUES('{"key1":123}', '$.key2'),
('{"key1":123}', '$.key1'),
('{"key1":[1,2,3]}', '$.key1'),
('{"key1": [1,2,3], "key1":123}', '$.key1'),
('{ "x": [0,1], "y": "[0,1]", "z": "Mon\\\"t\\\"y" }','$.z'),
('{"\\"key1":123}', '$."\\"key1"'),
('{"\\"key1\\"":123}', '$."\\"key1\\""'),
('{"key 1":123}', '$."key 1"');
SELECT s as json_text, p as path, JSON_VALUE(s, p) as json_value, JSON_QUERY(s, p) as json_query
FROM t1;
CREATE TABLE t2(s TEXT) ENGINE = columnstore;
INSERT INTO t2 VALUES('{"key1":123, "key2":{"key3":"value3"}}'),
('{"key1":123, "key3":[1,2,3]}'),
('{"key1":123, "key2":"[1]"}');
SELECT s as json_text, '$.key1' , JSON_VALUE(s, '$.key1') as json_value, JSON_QUERY(s, '$.key1') as json_query
FROM t2;
SELECT s as json_text, '$.key2' , JSON_VALUE(s, '$.key2') as json_value, JSON_QUERY(s, '$.key2') as json_query
FROM t2;
SELECT s as json_text, '$.key3' , JSON_VALUE(s, '$.key3') as json_value, JSON_QUERY(s, '$.key3') as json_query
FROM t2;
DROP TABLE t2;
DROP TABLE t1;
DROP DATABASE json_value_db;

View File

@ -0,0 +1,153 @@
--source ../include/disable_before_10.9.inc
--source ../include/have_columnstore.inc
--disable_warnings
DROP DATABASE IF EXISTS json_range_expr_db;
--enable_warnings
CREATE DATABASE json_range_expr_db;
USE json_range_expr_db;
--echo # Range expression is supported in MariaDB 10.9 binaries
CREATE TABLE t2(j TEXT, p TEXT) ENGINE = columnstore;
SET
@json = '{
"A": [0,
[1, 2, 3],
[4, 5, 6],
"seven",
0.8,
true,
false,
"eleven",
[12, 13, {"key1":"value1"},[15]],
true],
"B": {"C": 1},
"D": 2
}';
INSERT INTO
t2
VALUES
(@json, '$.A[-2][-1]'),
(@json, '$.A[last-1][last]');
SELECT
j,
p,
JSON_EXISTS(j, p) AS result
FROM
t2;
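# The 10.9 range syntax used in this file: negative indexes count from the end
# of an array, `last` names its final element, and `N to M` is an inclusive
# slice. A hedged sketch, not part of the recorded result:
# JSON_EXISTS('[10, 20, 30, 40]', '$[last-1]')   -- expected 1 (same element as $[2])
# JSON_EXTRACT('[10, 20, 30, 40]', '$[1 to 2]')  -- expected [20, 30]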
--echo # Test case 2
CREATE TABLE t3(j TEXT, p TEXT) ENGINE = columnstore;
SET
@json = '[
[1, {"key1": "value1"}, 3],
[false, 5, 6],
[7, 8, [9, {"key2": 2}, 11]],
[15, 1.34, [14], ["string1", [16, {"key1":[1,2,3,[4,5,6]]}, 18]]],
[19, 20],
21, 22
]';
INSERT INTO
t3
VALUES
(@json, '$[3][3][-2 to last]');
SELECT
j,
p,
JSON_EXISTS(j, p) AS result
FROM
t3;
-- echo # Test case 3
CREATE TABLE t4(j TEXT, p TEXT) ENGINE = columnstore;
SET
@json = '[
[1, {"key1": "value1"}, 3],
[false, 5, 6],
[7, 8, [9, {"key2": 2}, 11]],
[15, 1.34, [14], ["string1", [16, {"key1":[1,2,3,[4,5,6]]}, 18]]],
[19, 20],
21, 22
]';
INSERT INTO
t4
VALUES
(@json, '$[2][2][1 to 2]'),
(@json, '$[2][2][4 to 6]'),
(@json, '$[2][2][1 to 4]');
SELECT
j,
p,
JSON_EXISTS(j, p) AS result
FROM
t4;
--echo
--echo # JSON_EXTRACT
--echo
CREATE TABLE t5(j LONGTEXT, p LONGTEXT) ENGINE = COLUMNSTORE;
INSERT INTO
t5
VALUES
('[1, "val2", [3.1, -4]]', '$'),
('1', '$'),
('[10, 20, [30, 40]]', '$[2][*]'),
('[10, 20, [{"a":3}, 30, 40]]', '$[2][*]'),
(json_object('foo', 'foobar'), '$');
SELECT
j,
p,
JSON_EXTRACT(j, p) AS result
FROM
t5;
--echo
--echo # JSON_SEARCH
--echo
CREATE TABLE t6(j TEXT, f TEXT, v TEXT, e TEXT, p TEXT) ENGINE = COLUMNSTORE;
SET
@j = '["abc", [{"k": "10"}, "def"], {"x":"abc"}, {"y":"bcd"}]';
INSERT INTO
t6
VALUES
(@j, 'all', 'abc', NULL, '$'),
(@j, 'all', '10', NULL, '$'),
(@j, 'all', '10', NULL, '$[*]'),
(@j, 'all', '10', NULL, '$[*][0].k'),
(@j, 'all', '10', NULL, '$**.k');
SELECT
j AS json,
f AS return_arg,
v AS search_str,
e AS escape_char,
p AS path,
JSON_SEARCH(j, f, v, NULL, p) AS result
FROM
t6;
DROP TABLE t6;
DROP TABLE t5;
DROP TABLE t4;
DROP TABLE t3;
DROP TABLE t2;
DROP DATABASE json_range_expr_db;

View File

@ -0,0 +1,4 @@
if (`select version() not like '%10.9%'`)
{
skip Should run with MariaDB 10.9 or later binaries;
}

View File

@ -57,6 +57,32 @@ set(funcexp_LIB_SRCS
func_insert.cpp
func_instr.cpp
func_isnull.cpp
func_json_array.cpp
func_json_array_append.cpp
func_json_array_insert.cpp
func_json_contains.cpp
func_json_contains_path.cpp
func_json_depth.cpp
func_json_equals.cpp
func_json_exists.cpp
func_json_extract.cpp
func_json_format.cpp
func_json_insert.cpp
func_json_keys.cpp
func_json_length.cpp
func_json_merge.cpp
func_json_merge_patch.cpp
func_json_normalize.cpp
func_json_object.cpp
func_json_overlaps.cpp
func_json_query.cpp
func_json_quote.cpp
func_json_remove.cpp
func_json_search.cpp
func_json_type.cpp
func_json_unquote.cpp
func_json_valid.cpp
func_json_value.cpp
func_last_day.cpp
func_lcase.cpp
func_least.cpp
@ -117,6 +143,7 @@ set(funcexp_LIB_SRCS
func_weekday.cpp
func_year.cpp
func_yearweek.cpp
jsonhelpers.cpp
sql_crypt.cpp)
add_library(funcexp SHARED ${funcexp_LIB_SRCS})

View File

@ -0,0 +1,52 @@
#include <string>
using namespace std;
#include "functor_json.h"
#include "functioncolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_array::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp.size() > 0 ? fp[0]->data()->resultType() : resultType;
}
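// Build the array literal by appending every argument as a JSON value,
// producing "[v1, v2, ...]"; with no arguments the result is "[]".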
string Func_json_array::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
if (fp.size() == 0)
return "[]";
const CHARSET_INFO* retCS = type.getCharset();
string ret("[");
if (appendJSValue(ret, retCS, row, fp[0]))
goto error;
for (size_t i = 1; i < fp.size(); i++)
{
ret.append(", ");
if (appendJSValue(ret, retCS, row, fp[i]))
goto error;
}
ret.append("]");
return ret;
error:
isNull = true;
return "";
}
} // namespace funcexp

View File

@ -0,0 +1,117 @@
#include "functor_json.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_array_append::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
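// For each (path, value) pair: locate the path and either append the value to
// the array found there, or wrap the current scalar/object together with the
// new value in a fresh array. The intermediate document feeds the next pair
// and is finally re-emitted through doFormat() in LOOSE mode.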
string Func_json_array_append::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return "";
const CHARSET_INFO* cs = getCharset(fp[0]);
json_engine_t jsEg;
const uchar* arrEnd;
size_t strRestLen;
string retJS;
retJS.reserve(js.size() + padding);
initJSPaths(paths, fp, 1, 2);
string tmpJS{js};
for (size_t i = 1, j = 0; i < fp.size(); i += 2, j++)
{
const char* rawJS = tmpJS.data();
const size_t jsLen = tmpJS.size();
JSONPath& path = paths[j];
if (!path.parsed && parseJSPath(path, row, fp[i], false))
goto error;
initJSEngine(jsEg, cs, tmpJS);
if (locateJSPath(jsEg, path))
goto error;
if (json_read_value(&jsEg))
goto error;
if (jsEg.value_type == JSON_VALUE_ARRAY)
{
int itemSize;
if (json_skip_level_and_count(&jsEg, &itemSize))
goto error;
arrEnd = jsEg.s.c_str - jsEg.sav_c_len;
strRestLen = jsLen - (arrEnd - (const uchar*)rawJS);
retJS.append(rawJS, arrEnd - (const uchar*)rawJS);
if (itemSize)
retJS.append(", ");
if (appendJSValue(retJS, cs, row, fp[i + 1]))
goto error;
retJS.append((const char*)arrEnd, strRestLen);
}
else
{
const uchar *start, *end;
/* Wrap as an array. */
retJS.append(rawJS, (const char*)jsEg.value_begin - rawJS);
start = jsEg.value_begin;
if (jsEg.value_type == JSON_VALUE_OBJECT)
{
if (json_skip_level(&jsEg))
goto error;
end = jsEg.s.c_str;
}
else
end = jsEg.value_end;
retJS.append("[");
retJS.append((const char*)start, end - start);
retJS.append(", ");
if (appendJSValue(retJS, cs, row, fp[i + 1]))
goto error;
retJS.append("]");
retJS.append((const char*)jsEg.s.c_str, rawJS + jsLen - (const char*)jsEg.s.c_str);
}
// tmpJS holds the JSON string used as input for the next loop iteration
tmpJS.swap(retJS);
retJS.clear();
}
initJSEngine(jsEg, cs, tmpJS);
retJS.clear();
if (doFormat(&jsEg, retJS, Func_json_format::LOOSE))
goto error;
isNull = false;
return retJS;
error:
isNull = true;
return "";
}
} // namespace funcexp

View File

@ -0,0 +1,142 @@
#include "functor_json.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_array_insert::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
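// Each path must end in an array position. The target element is found by
// scanning the array up to that index, the new value is spliced in before it
// (or appended when the index is past the end), and the updated document is
// reused as input for the next (path, value) pair.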
string Func_json_array_insert::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return "";
const CHARSET_INFO* cs = getCharset(fp[0]);
json_engine_t jsEg;
string retJS;
retJS.reserve(js.size() + 8);
initJSPaths(paths, fp, 1, 2);
string tmpJS{js};
for (size_t i = 1, j = 0; i < fp.size(); i += 2, j++)
{
const char* rawJS = tmpJS.data();
const size_t jsLen = tmpJS.size();
JSONPath& path = paths[j];
if (!path.parsed)
{
if (parseJSPath(path, row, fp[i]) || path.p.last_step - 1 < path.p.steps ||
path.p.last_step->type != JSON_PATH_ARRAY)
{
if (path.p.s.error == 0)
path.p.s.error = SHOULD_END_WITH_ARRAY;
goto error;
}
path.p.last_step--;
}
initJSEngine(jsEg, cs, tmpJS);
path.currStep = path.p.steps;
int jsErr = 0;
if (locateJSPath(jsEg, path, &jsErr))
{
if (jsErr)
goto error;
// Can't find the array to insert into.
continue;
}
if (json_read_value(&jsEg))
goto error;
if (jsEg.value_type != JSON_VALUE_ARRAY)
{
/* Must be an array. */
continue;
}
const char* itemPos = 0;
IntType itemSize = 0;
while (json_scan_next(&jsEg) == 0 && jsEg.state != JST_ARRAY_END)
{
DBUG_ASSERT(jsEg.state == JST_VALUE);
if (itemSize == path.p.last_step[1].n_item)
{
itemPos = (const char*)jsEg.s.c_str;
break;
}
itemSize++;
if (json_read_value(&jsEg) || (!json_value_scalar(&jsEg) && json_skip_level(&jsEg)))
goto error;
}
if (unlikely(jsEg.s.error || *jsEg.killed_ptr))
goto error;
if (itemPos)
{
retJS.append(rawJS, itemPos - rawJS);
if (itemSize > 0)
retJS.append(" ");
if (appendJSValue(retJS, cs, row, fp[i + 1]))
goto error;
retJS.append(",");
if (itemSize == 0)
retJS.append(" ");
retJS.append(itemPos, rawJS + jsLen - itemPos);
}
else
{
/* Insert position wasn't found - append to the array. */
DBUG_ASSERT(jsEg.state == JST_ARRAY_END);
itemPos = (const char*)(jsEg.s.c_str - jsEg.sav_c_len);
retJS.append(rawJS, itemPos - rawJS);
if (itemSize > 0)
retJS.append(", ");
if (appendJSValue(retJS, cs, row, fp[i + 1]))
goto error;
retJS.append(itemPos, rawJS + jsLen - itemPos);
}
// tmpJS holds the JSON string used as input for the next loop iteration
tmpJS.swap(retJS);
retJS.clear();
}
initJSEngine(jsEg, cs, tmpJS);
retJS.clear();
if (doFormat(&jsEg, retJS, Func_json_format::LOOSE))
goto error;
isNull = false;
return retJS;
error:
isNull = true;
return "";
}
} // namespace funcexp

View File

@ -0,0 +1,213 @@
#include "functor_json.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
#include "rowgroup.h"
using namespace execplan;
using namespace rowgroup;
#include "dataconvert.h"
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace
{
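// Recursive containment check: an object contains a candidate object when every
// candidate key/value pair is found and contained; an array contains a candidate
// array when every candidate element is contained, and contains a scalar/object
// candidate when some element of the array contains it. Scalars compare by type
// and value (numbers within a small tolerance).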
static bool checkContains(json_engine_t* jsEg, json_engine_t* valEg)
{
json_engine_t localJsEg;
bool isEgSet;
switch (jsEg->value_type)
{
case JSON_VALUE_OBJECT:
{
json_string_t keyName;
if (valEg->value_type != JSON_VALUE_OBJECT)
return false;
localJsEg = *jsEg;
isEgSet = false;
json_string_set_cs(&keyName, valEg->s.cs);
while (json_scan_next(valEg) == 0 && valEg->state != JST_OBJ_END)
{
const uchar *keyStart, *keyEnd;
DBUG_ASSERT(valEg->state == JST_KEY);
keyStart = valEg->s.c_str;
do
{
keyEnd = valEg->s.c_str;
} while (json_read_keyname_chr(valEg) == 0);
if (unlikely(valEg->s.error) || json_read_value(valEg))
return false;
if (isEgSet)
*jsEg = localJsEg;
else
isEgSet = true;
json_string_set_str(&keyName, keyStart, keyEnd);
if (!findKeyInObject(jsEg, &keyName) || json_read_value(jsEg) || !checkContains(jsEg, valEg))
return false;
}
return valEg->state == JST_OBJ_END && !json_skip_level(jsEg);
}
case JSON_VALUE_ARRAY:
if (valEg->value_type != JSON_VALUE_ARRAY)
{
localJsEg = *valEg;
isEgSet = false;
while (json_scan_next(jsEg) == 0 && jsEg->state != JST_ARRAY_END)
{
int currLevel, isScaler;
DBUG_ASSERT(jsEg->state == JST_VALUE);
if (json_read_value(jsEg))
return false;
if (!(isScaler = json_value_scalar(jsEg)))
currLevel = json_get_level(jsEg);
if (isEgSet)
*valEg = localJsEg;
else
isEgSet = true;
if (checkContains(jsEg, valEg))
{
if (json_skip_level(jsEg))
return false;
return true;
}
if (unlikely(valEg->s.error) || unlikely(jsEg->s.error) ||
(!isScaler && json_skip_to_level(jsEg, currLevel)))
return false;
}
return false;
}
/* else */
localJsEg = *jsEg;
isEgSet = false;
while (json_scan_next(valEg) == 0 && valEg->state != JST_ARRAY_END)
{
DBUG_ASSERT(valEg->state == JST_VALUE);
if (json_read_value(valEg))
return false;
if (isEgSet)
*jsEg = localJsEg;
else
isEgSet = true;
if (!checkContains(jsEg, valEg))
return false;
}
return valEg->state == JST_ARRAY_END;
case JSON_VALUE_STRING:
if (valEg->value_type != JSON_VALUE_STRING)
return false;
/*
TODO: make a proper json-to-json comparison here that takes escaping
into account.
*/
return valEg->value_len == jsEg->value_len && memcmp(valEg->value, jsEg->value, valEg->value_len) == 0;
case JSON_VALUE_NUMBER:
if (valEg->value_type == JSON_VALUE_NUMBER)
{
double jsEgVal, valEgVal;
char* end;
int err;
jsEgVal = jsEg->s.cs->strntod((char*)jsEg->value, jsEg->value_len, &end, &err);
valEgVal = valEg->s.cs->strntod((char*)valEg->value, valEg->value_len, &end, &err);
return (fabs(jsEgVal - valEgVal) < 1e-12);
}
else
return false;
default: break;
}
/*
We have these not mentioned in the 'switch' above:
case JSON_VALUE_TRUE:
case JSON_VALUE_FALSE:
case JSON_VALUE_NULL:
*/
return valEg->value_type == jsEg->value_type;
}
} // namespace
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_contains::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
/**
* getBoolVal API definition
*/
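// JSON_CONTAINS(json_doc, candidate[, path]); when the candidate argument is a
// constant column its string value is cached after the first row.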
bool Func_json_contains::getBoolVal(Row& row, FunctionParm& fp, bool& isNull,
CalpontSystemCatalog::ColType& type)
{
bool isNullJS = false, isNullVal = false;
const string_view js = fp[0]->data()->getStrVal(row, isNullJS);
const string_view val = fp[1]->data()->getStrVal(row, isNullVal);
if (isNullJS || isNullVal)
{
isNull = true;
return false;
}
bool result = false;
if (!arg2Parsed)
{
if (!arg2Const)
{
ConstantColumn* constCol = dynamic_cast<ConstantColumn*>(fp[1]->data());
arg2Const = (constCol != nullptr);
}
arg2Val = val;
arg2Parsed = arg2Const;
}
json_engine_t jsEg;
initJSEngine(jsEg, getCharset(fp[0]), js);
if (fp.size() > 2)
{
if (!path.parsed && parseJSPath(path, row, fp[2], false))
goto error;
if (locateJSPath(jsEg, path))
goto error;
}
json_engine_t valEg;
initJSEngine(valEg, getCharset(fp[1]), arg2Val);
if (json_read_value(&jsEg) || json_read_value(&valEg))
goto error;
result = checkContains(&jsEg, &valEg);
if (unlikely(jsEg.s.error || valEg.s.error))
goto error;
return result;
error:
isNull = true;
return false;
}
} // namespace funcexp

View File

@ -0,0 +1,141 @@
#include <string_view>
#include <algorithm>
using namespace std;
#include "functor_json.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
#include "rowgroup.h"
using namespace execplan;
using namespace rowgroup;
#include "dataconvert.h"
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_contains_path::operationType(
FunctionParm& fp, CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
/**
* getBoolVal API definition
*/
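// JSON_CONTAINS_PATH(json_doc, 'one'|'all', path[, path] ...): in 'one' mode a
// single matching path is enough, in 'all' mode every supplied path must be found
// in the document.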
bool Func_json_contains_path::getBoolVal(Row& row, FunctionParm& fp, bool& isNull,
CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return false;
#ifdef MYSQL_GE_1009
int arrayCounters[JSON_DEPTH_LIMIT];
bool hasNegPath = false;
#endif
const int argSize = fp.size() - 2;
if (!isModeParsed)
{
if (!isModeConst)
isModeConst = (dynamic_cast<ConstantColumn*>(fp[1]->data()) != nullptr);
string mode = fp[1]->data()->getStrVal(row, isNull);
if (isNull)
return false;
transform(mode.begin(), mode.end(), mode.begin(), ::tolower);
if (mode != "one" && mode != "all")
{
isNull = true;
return false;
}
isModeOne = (mode == "one");
isModeParsed = isModeConst;
}
initJSPaths(paths, fp, 2, 1);
if (paths.size() == 0)
hasFound.assign(argSize, false);
for (size_t i = 2; i < fp.size(); i++)
{
JSONPath& path = paths[i - 2];
if (!path.parsed)
{
if (parseJSPath(path, row, fp[i]))
{
isNull = true;
return false;
}
#ifdef MYSQL_GE_1009
hasNegPath |= path.p.types_used & JSON_PATH_NEGATIVE_INDEX;
#endif
}
}
json_engine_t jsEg;
json_path_t p;
json_get_path_start(&jsEg, getCharset(fp[0]), (const uchar*)js.data(), (const uchar*)js.data() + js.size(),
&p);
bool result = false;
int needFound = 0;
if (!isModeOne)
{
hasFound.assign(argSize, false);
needFound = argSize;
}
while (json_get_path_next(&jsEg, &p) == 0)
{
#ifdef MYSQL_GE_1009
if (hasNegPath && jsEg.value_type == JSON_VALUE_ARRAY &&
json_skip_array_and_count(&jsEg, arrayCounters + (p.last_step - p.steps)))
{
result = true;
break;
}
#endif
for (int restSize = argSize, curr = 0; restSize > 0; restSize--, curr++)
{
JSONPath& path = paths[curr];
#ifdef MYSQL_GE_1009
int cmp = cmpJSPath(&path.p, &p, jsEg.value_type, arrayCounters);
#else
int cmp = cmpJSPath(&path.p, &p, jsEg.value_type);
#endif
if (cmp >= 0)
{
if (isModeOne)
{
result = true;
break;
}
/* mode_all */
if (hasFound[restSize - 1])
continue; /* already found */
if (--needFound == 0)
{
result = true;
break;
}
hasFound[restSize - 1] = true;
}
}
}
if (likely(jsEg.s.error == 0))
return result;
isNull = true;
return false;
}
} // namespace funcexp

View File

@ -0,0 +1,67 @@
#include "functor_json.h"
#include "functioncolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "dataconvert.h"
using namespace dataconvert;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_depth::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
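// JSON_DEPTH: a single pass over the document with the JSON scanner, tracking the
// maximum nesting level reached.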
int64_t Func_json_depth::getIntVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& op_ct)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return 0;
int depth = 0, currDepth = 0;
bool incDepth = true;
json_engine_t jsEg;
initJSEngine(jsEg, getCharset(fp[0]), js);
do
{
switch (jsEg.state)
{
case JST_VALUE:
case JST_KEY:
if (incDepth)
{
currDepth++;
incDepth = false;
if (currDepth > depth)
depth = currDepth;
}
break;
case JST_OBJ_START:
case JST_ARRAY_START: incDepth = true; break;
case JST_OBJ_END:
case JST_ARRAY_END:
if (!incDepth)
currDepth--;
incDepth = false;
break;
default: break;
}
} while (json_scan_next(&jsEg) == 0);
if (likely(!jsEg.s.error))
return depth;
isNull = true;
return 0;
}
} // namespace funcexp

View File

@ -0,0 +1,71 @@
#include <string_view>
#include <memory>
using namespace std;
#include "functor_json.h"
#include "functioncolumn.h"
#include "rowgroup.h"
using namespace execplan;
using namespace rowgroup;
#include "dataconvert.h"
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_equals::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
/**
* getBoolVal API definition
*/
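// JSON_EQUALS(json1, json2): normalize both arguments with json_normalize() and
// compare the canonical forms; NULL when either argument cannot be normalized.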
bool Func_json_equals::getBoolVal(Row& row, FunctionParm& fp, bool& isNull,
CalpontSystemCatalog::ColType& type)
{
// auto release the DYNAMIC_STRING
using DynamicString = unique_ptr<DYNAMIC_STRING, decltype(&dynstr_free)>;
DynamicString str1{new DYNAMIC_STRING(), dynstr_free};
if (init_dynamic_string(str1.get(), NULL, 0, 0))
{
isNull = true;
return true;
}
DynamicString str2{new DYNAMIC_STRING(), dynstr_free};
if (init_dynamic_string(str2.get(), NULL, 0, 0))
{
isNull = true;
return true;
}
const string_view js1 = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return false;
const string_view js2 = fp[1]->data()->getStrVal(row, isNull);
if (isNull)
return false;
bool result = false;
if (json_normalize(str1.get(), js1.data(), js1.size(), getCharset(fp[0])))
{
isNull = true;
return result;
}
if (json_normalize(str2.get(), js2.data(), js2.size(), getCharset(fp[1])))
{
isNull = true;
return result;
}
result = strcmp(str1->str, str2->str) ? false : true;
return result;
}
} // namespace funcexp

View File

@ -0,0 +1,51 @@
#include "functor_json.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
#include "rowgroup.h"
using namespace execplan;
using namespace rowgroup;
#include "dataconvert.h"
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_exists::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
/**
* getBoolVal API definition
*/
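// JSON_EXISTS(json_doc, path): true when the path locates a value, false when it
// does not, NULL on a malformed document or path.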
bool Func_json_exists::getBoolVal(Row& row, FunctionParm& fp, bool& isNull,
CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return false;
int jsErr = 0;
json_engine_t jsEg;
initJSEngine(jsEg, getCharset(fp[0]), js);
if (!path.parsed && parseJSPath(path, row, fp[1]))
goto error;
if (locateJSPath(jsEg, path, &jsErr))
{
if (jsErr)
goto error;
return false;
}
return true;
error:
isNull = true;
return false;
}
} // namespace funcexp

View File

@ -0,0 +1,243 @@
#include <type_traits>
#include "functor_json.h"
#include "functioncolumn.h"
#include "rowgroup.h"
#include "treenode.h"
using namespace execplan;
using namespace rowgroup;
#include "dataconvert.h"
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
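// Shared worker for JSON_EXTRACT: walk every path over the document, collect the
// matching values (wrapped in an array when more than one value may match) and
// reformat the result in LOOSE style. When compareWhole is false only the type of
// the first match is determined and the raw document is returned unchanged.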
int Func_json_extract::doExtract(Row& row, FunctionParm& fp, json_value_types* type, string& retJS,
bool compareWhole = true)
{
bool isNull = false;
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return 1;
const char* rawJS = js.data();
json_engine_t jsEg, savJSEg;
json_path_t p;
const uchar* value;
bool notFirstVal = false;
size_t valLen;
bool mayMulVal;
int wildcards;
bool isMatch;
#ifdef MYSQL_GE_1009
int arrayCounter[JSON_DEPTH_LIMIT];
bool hasNegPath = false;
#endif
const size_t argSize = fp.size();
string tmp;
initJSPaths(paths, fp, 1, 1);
for (size_t i = 1; i < argSize; i++)
{
JSONPath& path = paths[i - 1];
path.p.types_used = JSON_PATH_KEY_NULL;
if (!path.parsed && parseJSPath(path, row, fp[i]))
goto error;
#ifdef MYSQL_GE_1009
hasNegPath |= path.p.types_used & JSON_PATH_NEGATIVE_INDEX;
#endif
}
#ifdef MYSQL_GE_1009
wildcards = (JSON_PATH_WILD | JSON_PATH_DOUBLE_WILD | JSON_PATH_ARRAY_RANGE);
#else
wildcards = (JSON_PATH_WILD | JSON_PATH_DOUBLE_WILD);
#endif
mayMulVal = argSize > 2 || (paths[0].p.types_used & wildcards);
*type = mayMulVal ? JSON_VALUE_ARRAY : JSON_VALUE_NULL;
if (compareWhole)
{
retJS.clear();
if (mayMulVal)
retJS.append("[");
}
json_get_path_start(&jsEg, getCharset(fp[0]), (const uchar*)rawJS, (const uchar*)rawJS + js.size(), &p);
while (json_get_path_next(&jsEg, &p) == 0)
{
#ifdef MYSQL_GE_1009
if (hasNegPath && jsEg.value_type == JSON_VALUE_ARRAY &&
json_skip_array_and_count(&jsEg, arrayCounter + (p.last_step - p.steps)))
goto error;
#endif
#ifdef MYSQL_GE_1009
isMatch = matchJSPath(paths, &p, jsEg.value_type, arrayCounter, false);
#else
isMatch = matchJSPath(paths, &p, jsEg.value_type, nullptr, false);
#endif
if (!isMatch)
continue;
value = jsEg.value_begin;
if (*type == JSON_VALUE_NULL)
*type = jsEg.value_type;
/* we only care about the first found value */
if (!compareWhole)
{
retJS = js;
return 0;
}
if (json_value_scalar(&jsEg))
valLen = jsEg.value_end - value;
else
{
if (mayMulVal)
savJSEg = jsEg;
if (json_skip_level(&jsEg))
goto error;
valLen = jsEg.s.c_str - value;
if (mayMulVal)
jsEg = savJSEg;
}
if (notFirstVal)
retJS.append(", ");
retJS.append((const char*)value, valLen);
notFirstVal = true;
if (!mayMulVal)
{
/* Loop to the end of the JSON just to make sure it's valid. */
while (json_get_path_next(&jsEg, &p) == 0)
{
}
break;
}
}
if (unlikely(jsEg.s.error))
goto error;
if (!notFirstVal)
/* Nothing was found. */
goto error;
if (mayMulVal)
retJS.append("]");
initJSEngine(jsEg, getCharset(fp[0]), retJS);
if (doFormat(&jsEg, tmp, Func_json_format::LOOSE))
goto error;
retJS.clear();
retJS.swap(tmp);
return 0;
error:
return 1;
}
CalpontSystemCatalog::ColType Func_json_extract::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
string Func_json_extract::getStrVal(Row& row, FunctionParm& fp, bool& isNull,
CalpontSystemCatalog::ColType& type)
{
string retJS;
json_value_types valType;
if (doExtract(row, fp, &valType, retJS) == 0)
return retJS;
isNull = true;
return "";
}
int64_t Func_json_extract::getIntVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
string retJS;
json_value_types valType;
int64_t ret = 0;
if (doExtract(row, fp, &valType, retJS, false) == 0)
{
switch (valType)
{
case JSON_VALUE_NUMBER:
case JSON_VALUE_STRING:
{
char* end;
int err;
ret = getCharset(fp[0])->strntoll(retJS.data(), retJS.size(), 10, &end, &err);
break;
}
case JSON_VALUE_TRUE: ret = 1; break;
default: break;
};
}
return ret;
}
double Func_json_extract::getDoubleVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
string retJS;
json_value_types valType;
double ret = 0.0;
if (doExtract(row, fp, &valType, retJS, false) == 0)
{
switch (valType)
{
case JSON_VALUE_NUMBER:
case JSON_VALUE_STRING:
{
char* end;
int err;
ret = getCharset(fp[0])->strntod(retJS.data(), retJS.size(), &end, &err);
break;
}
case JSON_VALUE_TRUE: ret = 1.0; break;
default: break;
};
}
return ret;
}
execplan::IDB_Decimal Func_json_extract::getDecimalVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
json_value_types valType;
string retJS;
if (doExtract(row, fp, &valType, retJS, false) == 0)
{
switch (valType)
{
case JSON_VALUE_STRING:
case JSON_VALUE_NUMBER: return fp[0]->data()->getDecimalVal(row, isNull);
case JSON_VALUE_TRUE: return IDB_Decimal(1, 0, 1);
case JSON_VALUE_OBJECT:
case JSON_VALUE_ARRAY:
case JSON_VALUE_FALSE:
case JSON_VALUE_NULL:
case JSON_VALUE_UNINITIALIZED: break;
};
}
return IDB_Decimal(0, 0, 1);
}
} // namespace funcexp

View File

@ -0,0 +1,61 @@
#include <string_view>
using namespace std;
#include "functor_json.h"
#include "functioncolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_format::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
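// Shared implementation of JSON_COMPACT, JSON_LOOSE and JSON_DETAILED; DETAILED
// accepts an optional tab size clamped to [0, TAB_SIZE_LIMIT].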
string Func_json_format::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return "";
int tabSize = 4;
if (fmt == DETAILED)
{
if (fp.size() > 1)
{
tabSize = fp[1]->data()->getIntVal(row, isNull);
if (isNull)
return "";
if (tabSize < 0)
tabSize = 0;
else if (tabSize > TAB_SIZE_LIMIT)
tabSize = TAB_SIZE_LIMIT;
}
}
json_engine_t jsEg;
initJSEngine(jsEg, getCharset(fp[0]), js);
string ret;
if (doFormat(&jsEg, ret, fmt, tabSize))
{
isNull = true;
return "";
}
isNull = false;
return ret;
}
} // namespace funcexp

View File

@ -0,0 +1,245 @@
#include "functor_json.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "dataconvert.h"
using namespace dataconvert;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_insert::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
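// Shared implementation of JSON_INSERT, JSON_REPLACE and JSON_SET; arguments come
// in (path, value) pairs, and the mode decides whether existing values may be
// replaced and whether missing elements may be created.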
string Func_json_insert::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return "";
const bool isInsertMode = mode == INSERT || mode == SET;
const bool isReplaceMode = mode == REPLACE || mode == SET;
json_engine_t jsEg;
int jsErr = 0;
json_string_t keyName;
const CHARSET_INFO* cs = getCharset(fp[0]);
json_string_set_cs(&keyName, cs);
initJSPaths(paths, fp, 1, 2);
// retJS holds the result of the current update; tmpJS carries it into the next iteration
string retJS;
string tmpJS{js};
for (size_t i = 1, j = 0; i < fp.size(); i += 2, j++)
{
const char* rawJS = tmpJS.data();
const size_t jsLen = tmpJS.size();
JSONPath& path = paths[j];
const json_path_step_t* lastStep;
const char* valEnd;
if (!path.parsed)
{
if (parseJSPath(path, row, fp[i], false))
goto error;
path.p.last_step--;
}
initJSEngine(jsEg, cs, tmpJS);
if (path.p.last_step < path.p.steps)
goto v_found;
if (path.p.last_step >= path.p.steps && locateJSPath(jsEg, path, &jsErr))
{
if (jsErr)
goto error;
continue;
}
if (json_read_value(&jsEg))
goto error;
lastStep = path.p.last_step + 1;
if (lastStep->type & JSON_PATH_ARRAY)
{
IntType itemSize = 0;
if (jsEg.value_type != JSON_VALUE_ARRAY)
{
const uchar* valStart = jsEg.value_begin;
bool isArrAutoWrap;
if (isInsertMode)
{
if (isReplaceMode)
isArrAutoWrap = lastStep->n_item > 0;
else
{
if (lastStep->n_item == 0)
continue;
isArrAutoWrap = true;
}
}
else
{
if (lastStep->n_item)
continue;
isArrAutoWrap = false;
}
retJS.clear();
/* Wrap the value as an array. */
retJS.append(rawJS, (const char*)valStart - rawJS);
if (isArrAutoWrap)
retJS.append("[");
if (jsEg.value_type == JSON_VALUE_OBJECT)
{
if (json_skip_level(&jsEg))
goto error;
}
if (isArrAutoWrap)
retJS.append((const char*)valStart, jsEg.s.c_str - valStart);
retJS.append(", ");
if (appendJSValue(retJS, cs, row, fp[i + 1]))
goto error;
if (isArrAutoWrap)
retJS.append("]");
retJS.append((const char*)jsEg.s.c_str, rawJS + jsLen - (const char*)jsEg.s.c_str);
goto continue_point;
}
while (json_scan_next(&jsEg) == 0 && jsEg.state != JST_ARRAY_END)
{
switch (jsEg.state)
{
case JST_VALUE:
if (itemSize == lastStep->n_item)
goto v_found;
itemSize++;
if (json_skip_array_item(&jsEg))
goto error;
break;
default: break;
}
}
if (unlikely(jsEg.s.error))
goto error;
if (!isInsertMode)
continue;
valEnd = (const char*)(jsEg.s.c_str - jsEg.sav_c_len);
retJS.clear();
retJS.append(rawJS, valEnd - rawJS);
if (itemSize > 0)
retJS.append(", ");
if (appendJSValue(retJS, cs, row, fp[i + 1]))
goto error;
retJS.append(valEnd, rawJS + jsLen - valEnd);
}
else /*JSON_PATH_KEY*/
{
IntType keySize = 0;
if (jsEg.value_type != JSON_VALUE_OBJECT)
continue;
while (json_scan_next(&jsEg) == 0 && jsEg.state != JST_OBJ_END)
{
switch (jsEg.state)
{
case JST_KEY:
json_string_set_str(&keyName, lastStep->key, lastStep->key_end);
if (json_key_matches(&jsEg, &keyName))
goto v_found;
keySize++;
if (json_skip_key(&jsEg))
goto error;
break;
default: break;
}
}
if (unlikely(jsEg.s.error))
goto error;
if (!isInsertMode)
continue;
valEnd = (const char*)(jsEg.s.c_str - jsEg.sav_c_len);
retJS.clear();
retJS.append(rawJS, valEnd - rawJS);
if (keySize > 0)
retJS.append(", ");
retJS.append("\"");
retJS.append((const char*)lastStep->key, lastStep->key_end - lastStep->key);
retJS.append("\":");
if (appendJSValue(retJS, cs, row, fp[i + 1]))
goto error;
retJS.append(valEnd, rawJS + jsLen - valEnd);
}
goto continue_point;
v_found:
if (!isReplaceMode)
continue;
if (json_read_value(&jsEg))
goto error;
valEnd = (const char*)jsEg.value_begin;
retJS.clear();
if (!json_value_scalar(&jsEg))
{
if (json_skip_level(&jsEg))
goto error;
}
retJS.append(rawJS, valEnd - rawJS);
if (appendJSValue(retJS, cs, row, fp[i + 1]))
goto error;
retJS.append((const char*)jsEg.s.c_str, rawJS + jsLen - (const char*)jsEg.s.c_str);
continue_point:
// tmpJS holds the JSON string for the next loop iteration
tmpJS.swap(retJS);
retJS.clear();
}
initJSEngine(jsEg, cs, tmpJS);
retJS.clear();
if (doFormat(&jsEg, retJS, Func_json_format::LOOSE))
goto error;
isNull = false;
return retJS;
error:
isNull = true;
return "";
}
} // namespace funcexp

View File

@ -0,0 +1,130 @@
#include "functor_json.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "dataconvert.h"
using namespace dataconvert;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace
{
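// Check whether a key already appears in the partially built result list (a string
// of the form ["key1", "key2", ...]) so that JSON_KEYS stays duplicate free.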
bool checkKeyInList(const string& res, const uchar* key, const int keyLen)
{
const uchar* curr = (const uchar*)res.c_str() + 2; /* beginning '["' */
const uchar* end = (const uchar*)res.c_str() + res.size() - 1; /* ending '"' */
while (curr < end)
{
int i;
for (i = 0; curr[i] != '"' && i < keyLen; i++)
{
if (curr[i] != key[i])
break;
}
if (curr[i] == '"')
{
if (i == keyLen)
return true;
}
else
{
while (curr[i] != '"')
i++;
}
curr += i + 4; /* skip ', "' */
}
return false;
}
} // namespace
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_keys::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
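// JSON_KEYS(json_doc[, path]): return the top-level keys of the object at the given
// path as a JSON array, skipping duplicates via checkKeyInList().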
string Func_json_keys::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return "";
IntType keySize = 0;
string ret;
json_engine_t jsEg;
initJSEngine(jsEg, getCharset(fp[0]), js);
if (fp.size() > 1)
{
if (!path.parsed && parseJSPath(path, row, fp[1], false))
goto error;
if (locateJSPath(jsEg, path))
goto error;
}
if (json_read_value(&jsEg))
goto error;
if (jsEg.value_type != JSON_VALUE_OBJECT)
goto error;
ret.append("[");
while (json_scan_next(&jsEg) == 0 && jsEg.state != JST_OBJ_END)
{
const uchar *keyStart, *keyEnd;
int keyLen;
switch (jsEg.state)
{
case JST_KEY:
keyStart = jsEg.s.c_str;
do
{
keyEnd = jsEg.s.c_str;
} while (json_read_keyname_chr(&jsEg) == 0);
if (unlikely(jsEg.s.error))
goto error;
keyLen = (int)(keyEnd - keyStart);
if (!checkKeyInList(ret, keyStart, keyLen))
{
if (keySize > 0)
ret.append(", ");
ret.append("\"");
ret.append((const char*)keyStart, keyLen);
ret.append("\"");
keySize++;
}
break;
case JST_OBJ_START:
case JST_ARRAY_START:
if (json_skip_level(&jsEg))
break;
break;
default: break;
}
}
if (unlikely(!jsEg.s.error))
{
ret.append("]");
return ret;
}
error:
isNull = true;
return "";
}
} // namespace funcexp

View File

@ -0,0 +1,81 @@
#include "functor_json.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "dataconvert.h"
using namespace dataconvert;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_length::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
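// JSON_LENGTH(json_doc[, path]): a scalar has length 1; for an object or array,
// count the top-level keys/values at the located path and skip nested levels.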
int64_t Func_json_length::getIntVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& op_ct)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return 0;
json_engine_t jsEg;
int length = 0;
int err;
initJSEngine(jsEg, getCharset(fp[0]), js);
if (fp.size() > 1)
{
if (!path.parsed && parseJSPath(path, row, fp[1], false))
goto error;
if (locateJSPath(jsEg, path))
goto error;
}
if (json_read_value(&jsEg))
goto error;
if (json_value_scalar(&jsEg))
return 1;
while (!(err = json_scan_next(&jsEg)) && jsEg.state != JST_OBJ_END && jsEg.state != JST_ARRAY_END)
{
switch (jsEg.state)
{
case JST_VALUE:
case JST_KEY: length++; break;
case JST_OBJ_START:
case JST_ARRAY_START:
if (json_skip_level(&jsEg))
goto error;
break;
default: break;
};
}
if (!err)
{
// Parse to the end of the JSON just to check it's valid.
while (json_scan_next(&jsEg) == 0)
{
}
}
if (likely(!jsEg.s.error))
return length;
error:
isNull = true;
return 0;
}
} // namespace funcexp

View File

@ -0,0 +1,260 @@
#include "functor_json.h"
#include "functioncolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace
{
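// JSON_MERGE / JSON_MERGE_PRESERVE helper: two objects are merged key by key
// (values of duplicate keys are merged recursively); any other combination is
// concatenated into a single array.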
int doMerge(string& retJS, json_engine_t* jsEg1, json_engine_t* jsEg2)
{
if (json_read_value(jsEg1) || json_read_value(jsEg2))
return 1;
if (jsEg1->value_type == JSON_VALUE_OBJECT && jsEg2->value_type == JSON_VALUE_OBJECT)
{
json_engine_t savJSEg1 = *jsEg1;
json_engine_t savJSEg2 = *jsEg2;
int firstKey = 1;
json_string_t keyName;
json_string_set_cs(&keyName, jsEg1->s.cs);
retJS.append("{");
while (json_scan_next(jsEg1) == 0 && jsEg1->state != JST_OBJ_END)
{
const uchar *keyStart, *keyEnd;
/* Loop through the Json_1 keys and compare with the Json_2 keys. */
DBUG_ASSERT(jsEg1->state == JST_KEY);
keyStart = jsEg1->s.c_str;
do
{
keyEnd = jsEg1->s.c_str;
} while (json_read_keyname_chr(jsEg1) == 0);
if (unlikely(jsEg1->s.error))
return 1;
if (firstKey)
firstKey = 0;
else
{
retJS.append(", ");
*jsEg2 = savJSEg2;
}
retJS.append("\"");
retJS.append((const char*)keyStart, (size_t)(keyEnd - keyStart));
retJS.append("\":");
while (json_scan_next(jsEg2) == 0 && jsEg2->state != JST_OBJ_END)
{
int ires;
DBUG_ASSERT(jsEg2->state == JST_KEY);
json_string_set_str(&keyName, keyStart, keyEnd);
if (!json_key_matches(jsEg2, &keyName))
{
if (jsEg2->s.error || json_skip_key(jsEg2))
return 2;
continue;
}
/* Json_2 has same key as Json_1. Merge them. */
if ((ires = doMerge(retJS, jsEg1, jsEg2)))
return ires;
goto merged_j1;
}
if (unlikely(jsEg2->s.error))
return 2;
keyStart = jsEg1->s.c_str;
/* Just append the Json_1 key value. */
if (json_skip_key(jsEg1))
return 1;
retJS.append((const char*)keyStart, jsEg1->s.c_str - keyStart);
merged_j1:
continue;
}
*jsEg2 = savJSEg2;
/*
Now loop through the Json_2 keys.
Skip if there is same key in Json_1
*/
while (json_scan_next(jsEg2) == 0 && jsEg2->state != JST_OBJ_END)
{
const uchar *keyStart, *keyEnd;
DBUG_ASSERT(jsEg2->state == JST_KEY);
keyStart = jsEg2->s.c_str;
do
{
keyEnd = jsEg2->s.c_str;
} while (json_read_keyname_chr(jsEg2) == 0);
if (unlikely(jsEg2->s.error))
return 1;
*jsEg1 = savJSEg1;
while (json_scan_next(jsEg1) == 0 && jsEg1->state != JST_OBJ_END)
{
DBUG_ASSERT(jsEg1->state == JST_KEY);
json_string_set_str(&keyName, keyStart, keyEnd);
if (!json_key_matches(jsEg1, &keyName))
{
if (unlikely(jsEg1->s.error || json_skip_key(jsEg1)))
return 2;
continue;
}
if (json_skip_key(jsEg2) || json_skip_level(jsEg1))
return 1;
goto continue_j2;
}
if (unlikely(jsEg1->s.error))
return 2;
if (firstKey)
firstKey = 0;
else
retJS.append(", ");
if (json_skip_key(jsEg2))
return 1;
retJS.append("\"");
retJS.append((const char*)keyStart, jsEg2->s.c_str - keyStart);
continue_j2:
continue;
}
retJS.append("}");
}
else
{
const uchar *end1, *beg1, *end2, *beg2;
int itemSize1 = 1, itemSize2 = 1;
beg1 = jsEg1->value_begin;
/* Merge as a single array. */
if (jsEg1->value_type == JSON_VALUE_ARRAY)
{
if (json_skip_level_and_count(jsEg1, &itemSize1))
return 1;
end1 = jsEg1->s.c_str - jsEg1->sav_c_len;
}
else
{
retJS.append("[");
if (jsEg1->value_type == JSON_VALUE_OBJECT)
{
if (json_skip_level(jsEg1))
return 1;
end1 = jsEg1->s.c_str;
}
else
end1 = jsEg1->value_end;
}
retJS.append((const char*)beg1, end1 - beg1);
if (json_value_scalar(jsEg2))
{
beg2 = jsEg2->value_begin;
end2 = jsEg2->value_end;
}
else
{
if (jsEg2->value_type == JSON_VALUE_OBJECT)
{
beg2 = jsEg2->value_begin;
if (json_skip_level(jsEg2))
return 2;
}
else
{
beg2 = jsEg2->s.c_str;
if (json_skip_level_and_count(jsEg2, &itemSize2))
return 2;
}
end2 = jsEg2->s.c_str;
}
if (itemSize1 && itemSize2)
retJS.append(", ");
retJS.append((const char*)beg2, end2 - beg2);
if (jsEg2->value_type != JSON_VALUE_ARRAY)
retJS.append("]");
}
return 0;
}
} // namespace
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_merge::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
string Func_json_merge::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return "";
const CHARSET_INFO* js1CS = getCharset(fp[0]);
json_engine_t jsEg1, jsEg2;
string tmpJS{js};
string retJS;
for (size_t i = 1; i < fp.size(); i++)
{
const string_view js2 = fp[i]->data()->getStrVal(row, isNull);
if (isNull)
goto error;
initJSEngine(jsEg1, js1CS, tmpJS);
initJSEngine(jsEg2, getCharset(fp[i]), js2);
if (doMerge(retJS, &jsEg1, &jsEg2))
goto error;
// tmpJS holds the merge result for the next loop iteration
tmpJS.swap(retJS);
retJS.clear();
}
initJSEngine(jsEg1, js1CS, tmpJS);
retJS.clear();
if (doFormat(&jsEg1, retJS, Func_json_format::LOOSE))
goto error;
isNull = false;
return retJS;
error:
isNull = true;
return "";
}
} // namespace funcexp

View File

@ -0,0 +1,308 @@
#include "functor_json.h"
#include "functioncolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace
{
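// Helpers for JSON_MERGE_PATCH (RFC 7396 JSON Merge Patch): copyValuePatch copies a
// value while dropping object members whose value is null, and doMergePatch applies
// the patch document on top of the target document.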
int copyValuePatch(string& retJS, json_engine_t* jsEg)
{
int firstKey = 1;
if (jsEg->value_type != JSON_VALUE_OBJECT)
{
const uchar *beg, *end;
beg = jsEg->value_begin;
if (!json_value_scalar(jsEg))
{
if (json_skip_level(jsEg))
return 1;
end = jsEg->s.c_str;
}
else
end = jsEg->value_end;
retJS.append((const char*)beg, end - beg);
return 0;
}
/* JSON_VALUE_OBJECT */
retJS.append("{");
while (json_scan_next(jsEg) == 0 && jsEg->state != JST_OBJ_END)
{
const uchar* keyStart;
/* Loop through the object keys, dropping members whose value is null. */
DBUG_ASSERT(jsEg->state == JST_KEY);
keyStart = jsEg->s.c_str;
if (json_read_value(jsEg))
return 1;
if (jsEg->value_type == JSON_VALUE_NULL)
continue;
if (!firstKey)
retJS.append(", ");
else
firstKey = 0;
retJS.append("\"");
retJS.append((const char*)keyStart, jsEg->value_begin - keyStart);
if (copyValuePatch(retJS, jsEg))
return 1;
}
retJS.append("}");
return 0;
}
int doMergePatch(string& retJS, json_engine_t* jsEg1, json_engine_t* jsEg2, bool& isEmpty)
{
if (json_read_value(jsEg1) || json_read_value(jsEg2))
return 1;
if (jsEg1->value_type == JSON_VALUE_OBJECT && jsEg2->value_type == JSON_VALUE_OBJECT)
{
json_engine_t savJSEg1 = *jsEg1;
json_engine_t savJSEg2 = *jsEg2;
int firstKey = 1;
json_string_t keyName;
size_t savLen;
bool mrgEmpty;
isEmpty = false;
json_string_set_cs(&keyName, jsEg1->s.cs);
retJS.append("{");
while (json_scan_next(jsEg1) == 0 && jsEg1->state != JST_OBJ_END)
{
const uchar *keyStart, *keyEnd;
/* Loop through the Json_1 keys and compare with the Json_2 keys. */
DBUG_ASSERT(jsEg1->state == JST_KEY);
keyStart = jsEg1->s.c_str;
do
{
keyEnd = jsEg1->s.c_str;
} while (json_read_keyname_chr(jsEg1) == 0);
if (jsEg1->s.error)
return 1;
savLen = retJS.size();
if (!firstKey)
{
retJS.append(", ");
*jsEg2 = savJSEg2;
}
retJS.append("\"");
retJS.append((const char*)keyStart, keyEnd - keyStart);
retJS.append("\":");
while (json_scan_next(jsEg2) == 0 && jsEg2->state != JST_OBJ_END)
{
int ires;
DBUG_ASSERT(jsEg2->state == JST_KEY);
json_string_set_str(&keyName, keyStart, keyEnd);
if (!json_key_matches(jsEg2, &keyName))
{
if (jsEg2->s.error || json_skip_key(jsEg2))
return 2;
continue;
}
/* Json_2 has same key as Json_1. Merge them. */
if ((ires = doMergePatch(retJS, jsEg1, jsEg2, mrgEmpty)))
return ires;
if (mrgEmpty)
retJS = retJS.substr(0, savLen);
else
firstKey = 0;
goto merged_j1;
}
if (jsEg2->s.error)
return 2;
keyStart = jsEg1->s.c_str;
/* Just append the Json_1 key value. */
if (json_skip_key(jsEg1))
return 1;
retJS.append((const char*)keyStart, jsEg1->s.c_str - keyStart);
firstKey = 0;
merged_j1:
continue;
}
*jsEg2 = savJSEg2;
/*
Now loop through the Json_2 keys.
Skip if there is same key in Json_1
*/
while (json_scan_next(jsEg2) == 0 && jsEg2->state != JST_OBJ_END)
{
const uchar *keyStart, *keyEnd;
DBUG_ASSERT(jsEg2->state == JST_KEY);
keyStart = jsEg2->s.c_str;
do
{
keyEnd = jsEg2->s.c_str;
} while (json_read_keyname_chr(jsEg2) == 0);
if (jsEg2->s.error)
return 1;
*jsEg1 = savJSEg1;
while (json_scan_next(jsEg1) == 0 && jsEg1->state != JST_OBJ_END)
{
DBUG_ASSERT(jsEg1->state == JST_KEY);
json_string_set_str(&keyName, keyStart, keyEnd);
if (!json_key_matches(jsEg1, &keyName))
{
if (jsEg1->s.error || json_skip_key(jsEg1))
return 2;
continue;
}
if (json_skip_key(jsEg2) || json_skip_level(jsEg1))
return 1;
goto continue_j2;
}
if (jsEg1->s.error)
return 2;
savLen = retJS.size();
if (!firstKey)
retJS.append(", ");
retJS.append("\"");
retJS.append((const char*)keyStart, keyEnd - keyStart);
retJS.append("\":");
if (json_read_value(jsEg2))
return 1;
if (jsEg2->value_type == JSON_VALUE_NULL)
retJS = retJS.substr(0, savLen);
else
{
if (copyValuePatch(retJS, jsEg2))
return 1;
firstKey = 0;
}
continue_j2:
continue;
}
retJS.append("}");
}
else
{
if (!json_value_scalar(jsEg1) && json_skip_level(jsEg1))
return 1;
isEmpty = (jsEg2->value_type == JSON_VALUE_NULL);
if (!isEmpty && copyValuePatch(retJS, jsEg2))
return 1;
}
return 0;
}
} // namespace
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_merge_patch::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
string Func_json_merge_patch::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
// JSON_MERGE_PATCH: a NULL argument makes the result NULL unless a later non-object argument replaces it entirely
bool isEmpty = false, hasNullArg = false;
const string_view js = fp[0]->data()->getStrVal(row, isNull);
hasNullArg = isNull;
if (isNull)
isNull = false;
json_engine_t jsEg1, jsEg2;
jsEg1.s.error = jsEg2.s.error = 0;
string tmpJS{js};
string retJS;
for (size_t i = 1; i < fp.size(); i++)
{
const string_view js2 = fp[i]->data()->getStrVal(row, isNull);
if (isNull)
{
hasNullArg = true;
isNull = false;
goto next;
}
initJSEngine(jsEg2, getCharset(fp[i]), js2);
if (hasNullArg)
{
if (json_read_value(&jsEg2))
goto error;
if (jsEg2.value_type == JSON_VALUE_OBJECT)
goto next;
hasNullArg = false;
retJS.append(js2.data(), js2.size());
goto next;
}
initJSEngine(jsEg1, getCharset(fp[0]), tmpJS);
if (doMergePatch(retJS, &jsEg1, &jsEg2, isEmpty))
goto error;
if (isEmpty)
retJS.append("null");
next:
// tmpJS holds the merge result for the next loop iteration
tmpJS.swap(retJS);
retJS.clear();
}
if (hasNullArg)
goto error;
initJSEngine(jsEg1, getCharset(fp[0]), tmpJS);
retJS.clear();
if (doFormat(&jsEg1, retJS, Func_json_format::LOOSE))
goto error;
isNull = false;
return retJS;
error:
isNull = true;
return "";
}
} // namespace funcexp

View File

@ -0,0 +1,47 @@
#include <string_view>
#include <memory>
using namespace std;
#include "functor_json.h"
#include "functioncolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_normalize::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
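// JSON_NORMALIZE: delegate to json_normalize() from the bundled JSON library and
// return the canonical form, or NULL on failure.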
string Func_json_normalize::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return "";
using DynamicString = unique_ptr<DYNAMIC_STRING, decltype(&dynstr_free)>;
DynamicString str{new DYNAMIC_STRING(), dynstr_free};
if (init_dynamic_string(str.get(), NULL, 0, 0))
goto error;
if (json_normalize(str.get(), js.data(), js.size(), getCharset(fp[0])))
goto error;
return str->str;
error:
isNull = true;
return "";
}
} // namespace funcexp

View File

@ -0,0 +1,54 @@
#include <string_view>
using namespace std;
#include "functor_json.h"
#include "functioncolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "mcs_datatype.h"
using namespace datatypes;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_object::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp.size() > 0 ? fp[0]->data()->resultType() : resultType;
}
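// JSON_OBJECT(key, value[, key, value] ...): append the key/value pairs in order;
// with no arguments an empty object is returned.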
string Func_json_object::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
if (fp.size() == 0)
return "{}";
const CHARSET_INFO* retCS = type.getCharset();
string ret("{");
if (appendJSKeyName(ret, retCS, row, fp[0]) || appendJSValue(ret, retCS, row, fp[1]))
goto error;
for (size_t i = 2; i < fp.size(); i += 2)
{
ret.append(", ");
if (appendJSKeyName(ret, retCS, row, fp[i]) || appendJSValue(ret, retCS, row, fp[i + 1]))
goto error;
}
ret.append("}");
return ret;
error:
isNull = true;
return "";
}
} // namespace funcexp

View File

@ -0,0 +1,300 @@
#include "functor_json.h"
#include "functioncolumn.h"
#include "rowgroup.h"
using namespace execplan;
using namespace rowgroup;
#include "dataconvert.h"
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace
{
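// Helpers implementing JSON_OVERLAPS: two documents overlap when they share at
// least one element (arrays), at least one key/value pair (objects), or are equal
// scalars. The compareWhole flag switches the helpers to full equality comparison
// for nested values.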
int checkOverlapsWithObj(json_engine_t* jsEg, json_engine_t* jsEg2, bool compareWhole);
bool checkOverlaps(json_engine_t* jsEg1, json_engine_t* jsEg2, bool compareWhole);
/*
Once we know whether the two values match, there may still be elements left in
the array or keys left in the object that no longer need to be compared, so we
skip the rest of the current level.
*/
void jsonSkipCurrLevel(json_engine_t* jsEg1, json_engine_t* jsEg2)
{
json_skip_level(jsEg1);
json_skip_level(jsEg2);
}
/* At least one of the two arguments is a scalar. */
bool checkOverlapsWithScalar(json_engine_t* jsEg1, json_engine_t* jsEg2)
{
if (json_value_scalar(jsEg2))
{
if (jsEg1->value_type == jsEg2->value_type)
{
if (jsEg1->value_type == JSON_VALUE_NUMBER)
{
double dj, dv;
char* end;
int err;
dj = jsEg1->s.cs->strntod((char*)jsEg1->value, jsEg1->value_len, &end, &err);
dv = jsEg2->s.cs->strntod((char*)jsEg2->value, jsEg2->value_len, &end, &err);
return (fabs(dj - dv) < 1e-12);
}
else if (jsEg1->value_type == JSON_VALUE_STRING)
{
return jsEg2->value_len == jsEg1->value_len &&
memcmp(jsEg2->value, jsEg1->value, jsEg2->value_len) == 0;
}
}
return jsEg2->value_type == jsEg1->value_type;
}
else if (jsEg2->value_type == JSON_VALUE_ARRAY)
{
while (json_scan_next(jsEg2) == 0 && jsEg2->state == JST_VALUE)
{
if (json_read_value(jsEg2))
return false;
if (jsEg1->value_type == jsEg2->value_type)
{
int res1 = checkOverlapsWithScalar(jsEg1, jsEg2);
if (res1)
return true;
}
if (!json_value_scalar(jsEg2))
json_skip_level(jsEg2);
}
}
return false;
}
/*
Compare when one value is an object and the other is an array: we look for the
object inside the array. Whenever an array element is itself an object, the two
objects are compared in full; return true on the first complete match, otherwise
false.
*/
bool jsonCmpWithArrAndObj(json_engine_t* jsEg1, json_engine_t* jsEg2)
{
st_json_engine_t locjsEg2 = *jsEg2;
while (json_scan_next(jsEg1) == 0 && jsEg1->state == JST_VALUE)
{
if (json_read_value(jsEg1))
return false;
if (jsEg1->value_type == JSON_VALUE_OBJECT)
{
int res1 = checkOverlapsWithObj(jsEg1, jsEg2, true);
if (res1)
return true;
*jsEg2 = locjsEg2;
}
if (!json_value_scalar(jsEg1))
json_skip_level(jsEg1);
}
return false;
}
bool jsonCmpArrInOrder(json_engine_t* jsEg1, json_engine_t* jsEg2)
{
bool res = false;
while (json_scan_next(jsEg1) == 0 && json_scan_next(jsEg2) == 0 && jsEg1->state == JST_VALUE &&
jsEg2->state == JST_VALUE)
{
if (json_read_value(jsEg1) || json_read_value(jsEg2))
return false;
if (jsEg1->value_type != jsEg2->value_type)
{
jsonSkipCurrLevel(jsEg1, jsEg2);
return false;
}
res = checkOverlaps(jsEg1, jsEg2, true);
if (!res)
{
jsonSkipCurrLevel(jsEg1, jsEg2);
return false;
}
}
res = (jsEg2->state == JST_ARRAY_END || jsEg2->state == JST_OBJ_END ? true : false);
jsonSkipCurrLevel(jsEg1, jsEg2);
return res;
}
int checkOverlapsWithArr(json_engine_t* jsEg1, json_engine_t* jsEg2, bool compareWhole)
{
if (jsEg2->value_type == JSON_VALUE_ARRAY)
{
if (compareWhole)
return jsonCmpArrInOrder(jsEg1, jsEg2);
json_engine_t locjsEg2ue = *jsEg2, currJSEg = *jsEg1;
while (json_scan_next(jsEg1) == 0 && jsEg1->state == JST_VALUE)
{
if (json_read_value(jsEg1))
return false;
currJSEg = *jsEg1;
while (json_scan_next(jsEg2) == 0 && jsEg2->state == JST_VALUE)
{
if (json_read_value(jsEg2))
return false;
if (jsEg1->value_type == jsEg2->value_type)
{
int res1 = checkOverlaps(jsEg1, jsEg2, true);
if (res1)
return true;
}
else
{
if (!json_value_scalar(jsEg2))
json_skip_level(jsEg2);
}
*jsEg1 = currJSEg;
}
*jsEg2 = locjsEg2ue;
if (!json_value_scalar(jsEg1))
json_skip_level(jsEg1);
}
return false;
}
else if (jsEg2->value_type == JSON_VALUE_OBJECT)
{
if (compareWhole)
{
jsonSkipCurrLevel(jsEg1, jsEg2);
return false;
}
return jsonCmpWithArrAndObj(jsEg1, jsEg2);
}
else
return checkOverlapsWithScalar(jsEg2, jsEg1);
}
int checkOverlapsWithObj(json_engine_t* jsEg1, json_engine_t* jsEg2, bool compareWhole)
{
if (jsEg2->value_type == JSON_VALUE_OBJECT)
{
/* Find at least one common key-value pair */
json_string_t keyName;
bool foundKey = false, foundVal = false;
json_engine_t locJSEg = *jsEg1;
const uchar *keyStart, *keyEnd;
json_string_set_cs(&keyName, jsEg2->s.cs);
while (json_scan_next(jsEg2) == 0 && jsEg2->state == JST_KEY)
{
keyStart = jsEg2->s.c_str;
do
{
keyEnd = jsEg2->s.c_str;
} while (json_read_keyname_chr(jsEg2) == 0);
if (unlikely(jsEg2->s.error))
return false;
json_string_set_str(&keyName, keyStart, keyEnd);
foundKey = findKeyInObject(jsEg1, &keyName);
foundVal = 0;
if (foundKey)
{
if (json_read_value(jsEg1) || json_read_value(jsEg2))
return false;
/*
The value of a key/value pair can be anything. If it is an object we need to
compare the whole value, and if it is an array the elements must match in
order, so compareWhole is set to true.
*/
if (jsEg1->value_type == jsEg2->value_type)
foundVal = checkOverlaps(jsEg1, jsEg2, true);
if (foundVal)
{
if (!compareWhole)
return true;
*jsEg1 = locJSEg;
}
else
{
if (compareWhole)
{
jsonSkipCurrLevel(jsEg1, jsEg2);
return false;
}
*jsEg1 = locJSEg;
}
}
else
{
if (compareWhole)
{
jsonSkipCurrLevel(jsEg1, jsEg2);
return false;
}
json_skip_key(jsEg2);
*jsEg1 = locJSEg;
}
}
jsonSkipCurrLevel(jsEg1, jsEg2);
return compareWhole ? true : false;
}
else if (jsEg2->value_type == JSON_VALUE_ARRAY)
{
if (compareWhole)
{
jsonSkipCurrLevel(jsEg1, jsEg2);
return false;
}
return jsonCmpWithArrAndObj(jsEg2, jsEg1);
}
return false;
}
bool checkOverlaps(json_engine_t* jsEg1, json_engine_t* jsEg2, bool compareWhole)
{
switch (jsEg1->value_type)
{
case JSON_VALUE_OBJECT: return checkOverlapsWithObj(jsEg1, jsEg2, compareWhole);
case JSON_VALUE_ARRAY: return checkOverlapsWithArr(jsEg1, jsEg2, compareWhole);
default: return checkOverlapsWithScalar(jsEg1, jsEg2);
}
return false;
}
} // namespace
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_overlaps::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
/**
* getBoolVal API definition
*/
bool Func_json_overlaps::getBoolVal(Row& row, FunctionParm& fp, bool& isNull,
CalpontSystemCatalog::ColType& type)
{
bool isNullJS1 = false, isNullJS2 = false;
const string_view js1 = fp[0]->data()->getStrVal(row, isNullJS1);
const string_view js2 = fp[1]->data()->getStrVal(row, isNullJS2);
if (isNullJS1 || isNullJS2)
return false;
json_engine_t jsEg1, jsEg2;
initJSEngine(jsEg1, getCharset(fp[0]), js1);
initJSEngine(jsEg2, getCharset(fp[1]), js2);
if (json_read_value(&jsEg1) || json_read_value(&jsEg2))
return false;
bool result = checkOverlaps(&jsEg1, &jsEg2, false);
if (unlikely(jsEg1.s.error || jsEg2.s.error))
return false;
return result;
}
} // namespace funcexp

View File

@ -0,0 +1,48 @@
#include "functor_json.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
namespace funcexp
{
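// JSON_QUERY returns the object or array located at the given path; scalar matches
// are skipped (JSON_VALUE handles the scalar case).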
bool JSONEgWrapper::checkAndGetComplexVal(string& ret, int* error)
{
if (json_value_scalar(this))
{
/* We skip scalar values. */
if (json_scan_next(this))
*error = 1;
return true;
}
const uchar* tmpValue = value;
if (json_skip_level(this))
{
*error = 1;
return true;
}
ret.append((const char*)value, s.c_str - tmpValue);
return false;
}
CalpontSystemCatalog::ColType Func_json_query::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
string Func_json_query::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
string ret;
isNull = JSONPathWrapper::extract(ret, row, fp[0], fp[1]);
return isNull ? "" : ret;
}
} // namespace funcexp

View File

@ -0,0 +1,44 @@
#include <string_view>
using namespace std;
#include "functor_json.h"
#include "functioncolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "mcs_datatype.h"
using namespace datatypes;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_quote::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
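// JSON_QUOTE: wrap a character-typed argument in double quotes and escape it as a
// JSON string; non-character arguments yield an empty result.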
std::string Func_json_quote::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull || !isCharType(fp[0]->data()->resultType().colDataType))
return "";
string ret("\"");
isNull = appendEscapedJS(ret, &my_charset_utf8mb4_bin, js, getCharset(fp[0]));
if (isNull)
return "";
ret.append("\"");
return ret;
}
} // namespace funcexp

View File

@ -0,0 +1,164 @@
#include "functor_json.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "dataconvert.h"
using namespace dataconvert;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_remove::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
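// JSON_REMOVE(json_doc, path[, path] ...): for each path, locate the array element
// or object key to delete, splice it (with its separating comma) out of the text,
// and feed the intermediate result into the next iteration.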
string Func_json_remove::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return "";
json_engine_t jsEg;
int jsErr = 0;
json_string_t keyName;
const CHARSET_INFO* cs = getCharset(fp[0]);
json_string_set_cs(&keyName, cs);
initJSPaths(paths, fp, 1, 1);
string retJS;
string tmpJS{js};
for (size_t i = 1, j = 0; i < fp.size(); i++, j++)
{
const char* rawJS = tmpJS.data();
const size_t jsLen = tmpJS.size();
JSONPath& path = paths[j];
const json_path_step_t* lastStep;
const char *remStart = nullptr, *remEnd = nullptr;
IntType itemSize = 0;
if (!path.parsed)
{
if (parseJSPath(path, row, fp[i], false))
goto error;
path.p.last_step--;
if (path.p.last_step < path.p.steps)
{
path.p.s.error = TRIVIAL_PATH_NOT_ALLOWED;
goto error;
}
}
initJSEngine(jsEg, cs, rawJS);
if (path.p.last_step < path.p.steps)
goto v_found;
if (locateJSPath(jsEg, path, &jsErr) && jsErr)
goto error;
if (json_read_value(&jsEg))
goto error;
lastStep = path.p.last_step + 1;
if (lastStep->type & JSON_PATH_ARRAY)
{
if (jsEg.value_type != JSON_VALUE_ARRAY)
continue;
while (json_scan_next(&jsEg) == 0 && jsEg.state != JST_ARRAY_END)
{
switch (jsEg.state)
{
case JST_VALUE:
if (itemSize == lastStep->n_item)
{
remStart = (const char*)(jsEg.s.c_str - (itemSize ? jsEg.sav_c_len : 0));
goto v_found;
}
itemSize++;
if (json_skip_array_item(&jsEg))
goto error;
break;
default: break;
}
}
if (unlikely(jsEg.s.error))
goto error;
continue;
}
else /*JSON_PATH_KEY*/
{
if (jsEg.value_type != JSON_VALUE_OBJECT)
continue;
while (json_scan_next(&jsEg) == 0 && jsEg.state != JST_OBJ_END)
{
switch (jsEg.state)
{
case JST_KEY:
if (itemSize == 0)
remStart = (const char*)(jsEg.s.c_str - jsEg.sav_c_len);
json_string_set_str(&keyName, lastStep->key, lastStep->key_end);
if (json_key_matches(&jsEg, &keyName))
goto v_found;
if (json_skip_key(&jsEg))
goto error;
remStart = (const char*)jsEg.s.c_str;
itemSize++;
break;
default: break;
}
}
if (unlikely(jsEg.s.error))
goto error;
continue;
}
v_found:
if (json_skip_key(&jsEg) || json_scan_next(&jsEg))
goto error;
remEnd = (jsEg.state == JST_VALUE && itemSize == 0) ? (const char*)jsEg.s.c_str
: (const char*)(jsEg.s.c_str - jsEg.sav_c_len);
retJS.clear();
retJS.append(rawJS, remStart - rawJS);
if (jsEg.state == JST_KEY && itemSize > 0)
retJS.append(",");
retJS.append(remEnd, rawJS + jsLen - remEnd);
tmpJS.swap(retJS);
retJS.clear();
}
initJSEngine(jsEg, cs, tmpJS);
retJS.clear();
if (doFormat(&jsEg, retJS, Func_json_format::LOOSE))
goto error;
isNull = false;
return retJS;
error:
isNull = true;
return "";
}
} // namespace funcexp

View File

@ -0,0 +1,223 @@
#include <string_view>
#include <algorithm>
using namespace std;
#include "functor_json.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace
{
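// Render a parsed json_path_t back into its textual form ("$.key[n]...") for the
// result of JSON_SEARCH.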
static bool appendJSPath(string& ret, const json_path_t* p)
{
const json_path_step_t* c;
try
{
ret.append("\"$");
for (c = p->steps + 1; c <= p->last_step; c++)
{
if (c->type & JSON_PATH_KEY)
{
ret.append(".", 1);
ret.append((const char*)c->key, c->key_end - c->key);
}
else /*JSON_PATH_ARRAY*/
{
ret.append("[");
ret.append(to_string(c->n_item));
ret.append("]");
}
}
ret.append("\"");
}
catch (...)
{
return true;
}
return false;
}
} // namespace
namespace funcexp
{
const static int wildOne = '_';
const static int wildMany = '%';
int Func_json_search::cmpJSValWild(json_engine_t* jsEg, const string_view& cmpStr, const CHARSET_INFO* cs)
{
if (jsEg->value_type != JSON_VALUE_STRING || !jsEg->value_escaped)
return cs->wildcmp((const char*)jsEg->value, (const char*)(jsEg->value + jsEg->value_len),
(const char*)cmpStr.data(), (const char*)cmpStr.data() + cmpStr.size(), escape,
wildOne, wildMany)
? 0
: 1;
{
int strLen = (jsEg->value_len / 1024 + 1) * 1024;
char* buf = (char*)alloca(strLen);
if ((strLen = json_unescape(jsEg->s.cs, jsEg->value, jsEg->value + jsEg->value_len, jsEg->s.cs,
(uchar*)buf, (uchar*)(buf + strLen))) <= 0)
return 0;
return cs->wildcmp(buf, buf + strLen, cmpStr.data(), cmpStr.data() + cmpStr.size(), escape, wildOne,
wildMany)
? 0
: 1;
}
}
CalpontSystemCatalog::ColType Func_json_search::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
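// JSON_SEARCH(json_doc, 'one'|'all', search_str[, escape_char[, path] ...]): return
// the path (or array of paths) whose string value matches the LIKE-style pattern;
// the escape character defaults to backslash.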
string Func_json_search::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
string ret;
bool isNullJS = false, isNullVal = false;
const string_view js = fp[0]->data()->getStrVal(row, isNullJS);
const string_view cmpStr = fp[2]->data()->getStrVal(row, isNullVal);
if (isNullJS || isNullVal)
{
isNull = true;
return "";
}
if (!isModeParsed)
{
if (!isModeConst)
isModeConst = (dynamic_cast<ConstantColumn*>(fp[1]->data()) != nullptr);
string mode = fp[1]->data()->getStrVal(row, isNull);
if (isNull)
return "";
transform(mode.begin(), mode.end(), mode.begin(), ::tolower);
if (mode != "one" && mode != "all")
{
isNull = true;
return "";
}
isModeOne = (mode == "one");
isModeParsed = isModeConst;
}
if (fp.size() >= 4)
{
if (dynamic_cast<ConstantColumn*>(fp[3]->data()) == nullptr)
{
isNull = true;
return "";
}
bool isNullEscape = false;
const string_view escapeStr = fp[3]->data()->getStrVal(row, isNullEscape);
if (escapeStr.size() > 1)
{
isNull = true;
return "";
}
escape = (isNullEscape || escapeStr.empty()) ? '\\' : escapeStr[0];
}
json_engine_t jsEg;
json_path_t p, savPath;
const CHARSET_INFO* cs = getCharset(fp[0]);
#ifdef MYSQL_GE_1009
int arrayCounter[JSON_DEPTH_LIMIT];
bool hasNegPath = 0;
#endif
int pathFound = 0;
initJSPaths(paths, fp, 4, 1);
for (size_t i = 4; i < fp.size(); i++)
{
JSONPath& path = paths[i - 4];
if (!path.parsed)
{
if (parseJSPath(path, row, fp[i]))
goto error;
#ifdef MYSQL_GE_1009
hasNegPath |= path.p.types_used & JSON_PATH_NEGATIVE_INDEX;
#endif
}
}
json_get_path_start(&jsEg, cs, (const uchar*)js.data(), (const uchar*)js.data() + js.size(), &p);
while (json_get_path_next(&jsEg, &p) == 0)
{
#ifdef MYSQL_GE_1009
if (hasNegPath && jsEg.value_type == JSON_VALUE_ARRAY &&
json_skip_array_and_count(&jsEg, arrayCounter + (p.last_step - p.steps)))
goto error;
#endif
if (json_value_scalar(&jsEg))
{
#ifdef MYSQL_GE_1009
bool isMatch = matchJSPath(paths, &p, jsEg.value_type, arrayCounter);
#else
bool isMatch = matchJSPath(paths, &p, jsEg.value_type);
#endif
if ((fp.size() < 5 || isMatch) && cmpJSValWild(&jsEg, cmpStr, cs) != 0)
{
++pathFound;
if (pathFound == 1)
{
savPath = p;
savPath.last_step = savPath.steps + (p.last_step - p.steps);
}
else
{
if (pathFound == 2)
{
ret.append("[");
if (appendJSPath(ret, &savPath))
goto error;
}
ret.append(", ");
if (appendJSPath(ret, &p))
goto error;
}
if (isModeOne)
goto end;
}
}
}
end:
if (pathFound == 0)
goto error;
if (pathFound == 1)
{
if (appendJSPath(ret, &savPath))
goto error;
}
else
ret.append("]");
isNull = false;
return ret;
error:
isNull = true;
return "";
}
} // namespace funcexp

View File

@ -0,0 +1,53 @@
#include "functor_json.h"
#include "functioncolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_type::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
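// JSON_TYPE: read the top-level value and map it to OBJECT, ARRAY, STRING,
// DOUBLE/INTEGER, BOOLEAN or NULL.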
string Func_json_type::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return "";
json_engine_t jsEg;
string result;
initJSEngine(jsEg, getCharset(fp[0]), js);
if (json_read_value(&jsEg))
{
isNull = true;
return "";
}
switch (jsEg.value_type)
{
case JSON_VALUE_OBJECT: result = "OBJECT"; break;
case JSON_VALUE_ARRAY: result = "ARRAY"; break;
case JSON_VALUE_STRING: result = "STRING"; break;
case JSON_VALUE_NUMBER: result = (jsEg.num_flags & JSON_NUM_FRAC_PART) ? "DOUBLE" : "INTEGER"; break;
case JSON_VALUE_TRUE:
case JSON_VALUE_FALSE: result = "BOOLEAN"; break;
default: result = "NULL"; break;
}
return result;
}
} // namespace funcexp

View File

@ -0,0 +1,51 @@
#include "functor_json.h"
#include "functioncolumn.h"
#include "jsonhelpers.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_unquote::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
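// JSON_UNQUOTE: if the argument parses as a JSON string, decode its escapes and
// strip the surrounding quotes; any other value is returned unchanged.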
std::string Func_json_unquote::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return "";
json_engine_t jsEg;
int strLen;
const CHARSET_INFO* cs = type.getCharset();
initJSEngine(jsEg, cs, js);
json_read_value(&jsEg);
if (unlikely(jsEg.s.error) || jsEg.value_type != JSON_VALUE_STRING)
return string{js};
char* buf = (char*)alloca(jsEg.value_len + 1);
if ((strLen = json_unescape(cs, jsEg.value, jsEg.value + jsEg.value_len, &my_charset_utf8mb3_general_ci,
(uchar*)buf, (uchar*)(buf + jsEg.value_len))) >= 0)
{
buf[strLen] = '\0';
string ret = buf;
return strLen == 0 ? "" : ret;
}
return string{js};
}
} // namespace funcexp

View File

@ -0,0 +1,34 @@
#include <string_view>
using namespace std;
#include "functor_json.h"
#include "functioncolumn.h"
#include "rowgroup.h"
using namespace execplan;
using namespace rowgroup;
#include "dataconvert.h"
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
CalpontSystemCatalog::ColType Func_json_valid::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
/**
* getBoolVal API definition
*/
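// JSON_VALID: true when the whole argument parses as a JSON document in the
// argument's character set.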
bool Func_json_valid::getBoolVal(Row& row, FunctionParm& fp, bool& isNull,
CalpontSystemCatalog::ColType& type)
{
const string_view js = fp[0]->data()->getStrVal(row, isNull);
if (isNull)
return false;
return json_valid(js.data(), js.size(), getCharset(fp[0]));
}
} // namespace funcexp

View File

@ -0,0 +1,122 @@
#include "functor_json.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
using namespace execplan;
#include "rowgroup.h"
using namespace rowgroup;
#include "joblisttypes.h"
using namespace joblist;
#include "jsonhelpers.h"
using namespace funcexp::helpers;
namespace funcexp
{
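// JSON_VALUE returns the scalar located at the given path: booleans are mapped to
// "1"/"0" and string escapes are decoded; non-scalar matches are skipped.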
bool JSONEgWrapper::checkAndGetScalar(string& ret, int* error)
{
CHARSET_INFO* cs;
const uchar* js;
uint jsLen;
if (!json_value_scalar(this))
{
/* We only look for scalar values! */
if (json_skip_level(this) || json_scan_next(this))
*error = 1;
return true;
}
if (value_type == JSON_VALUE_TRUE || value_type == JSON_VALUE_FALSE)
{
cs = &my_charset_utf8mb4_bin;
js = (const uchar*)((value_type == JSON_VALUE_TRUE) ? "1" : "0");
jsLen = 1;
}
else
{
cs = s.cs;
js = value;
jsLen = value_len;
}
int strLen = jsLen * cs->mbmaxlen;
char* buf = (char*)alloca(jsLen + strLen);
if ((strLen = json_unescape(cs, js, js + jsLen, cs, (uchar*)buf, (uchar*)buf + jsLen + strLen)) > 0)
{
buf[strLen] = '\0';
ret.append(buf);
return 0;
}
return strLen;
}
/*
Returns NULL, not an error if the found value
is not a scalar.
*/
bool JSONPathWrapper::extract(std::string& ret, rowgroup::Row& row, execplan::SPTP& funcParamJS,
execplan::SPTP& funcParamPath)
{
bool isNullJS = false, isNullPath = false;
const string& js = funcParamJS->data()->getStrVal(row, isNullJS);
const string_view jsp = funcParamPath->data()->getStrVal(row, isNullPath);
if (isNullJS || isNullPath)
return true;
int error = 0;
if (!parsed)
{
if (!constant)
{
ConstantColumn* constCol = dynamic_cast<ConstantColumn*>(funcParamPath->data());
constant = (constCol != nullptr);
}
if (isNullPath || json_path_setup(&p, getCharset(funcParamPath), (const uchar*)jsp.data(),
(const uchar*)jsp.data() + jsp.size()))
return true;
parsed = constant;
}
JSONEgWrapper je(js, getCharset(funcParamJS));
currStep = p.steps;
do
{
if (error)
return true;
IntType arrayCounters[JSON_DEPTH_LIMIT];
if (json_find_path(&je, &p, &currStep, arrayCounters))
return true;
if (json_read_value(&je))
return true;
} while (unlikely(checkAndGetValue(&je, ret, &error)));
return false;
}
CalpontSystemCatalog::ColType Func_json_value::operationType(FunctionParm& fp,
CalpontSystemCatalog::ColType& resultType)
{
return fp[0]->data()->resultType();
}
string Func_json_value::getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type)
{
string ret;
isNull = JSONPathWrapper::extract(ret, row, fp[0], fp[1]);
return isNull ? "" : ret;
}
} // namespace funcexp

View File

@ -21,7 +21,7 @@
* *
* *
****************************************************************************/
#include "functor_json.h"
#include <boost/thread/mutex.hpp>
#include "funcexp.h"
@ -143,6 +143,39 @@ FuncExp::FuncExp()
fFuncMap["isnottrue"] = new Func_IsNotTrue(); fFuncMap["isnottrue"] = new Func_IsNotTrue();
fFuncMap["isfalse"] = new Func_IsFalse(); fFuncMap["isfalse"] = new Func_IsFalse();
fFuncMap["isnotfalse"] = new Func_IsNotFalse(); fFuncMap["isnotfalse"] = new Func_IsNotFalse();
fFuncMap["json_array"] = new Func_json_array();
fFuncMap["json_array_append"] = new Func_json_array_append();
fFuncMap["json_array_insert"] = new Func_json_array_insert();
fFuncMap["json_contains"] = new Func_json_contains();
fFuncMap["json_contains_path"] = new Func_json_contains_path();
fFuncMap["json_compact"] = new Func_json_format(Func_json_format::COMPACT);
fFuncMap["json_depth"] = new Func_json_depth();
fFuncMap["json_equals"] = new Func_json_equals();
fFuncMap["json_exists"] = new Func_json_exists();
fFuncMap["json_extract"] = new Func_json_extract();
fFuncMap["json_format"] = new Func_json_format();
fFuncMap["json_insert"] = new Func_json_insert();
fFuncMap["json_keys"] = new Func_json_keys();
fFuncMap["json_length"] = new Func_json_length();
fFuncMap["json_loose"] = new Func_json_format(Func_json_format::LOOSE);
fFuncMap["json_merge"] = new Func_json_merge();
fFuncMap["json_merge_patch"] = new Func_json_merge_patch();
fFuncMap["json_merge_preserve"] = new Func_json_merge();
fFuncMap["json_normalize"] = new Func_json_normalize();
fFuncMap["json_object"] = new Func_json_object();
#ifdef MYSQL_GE_1009
fFuncMap["json_overlaps"] = new Func_json_overlaps();
#endif
fFuncMap["json_query"] = new Func_json_query();
fFuncMap["json_quote"] = new Func_json_quote();
fFuncMap["json_remove"] = new Func_json_remove();
fFuncMap["json_replace"] = new Func_json_insert(Func_json_insert::REPLACE);
fFuncMap["json_search"] = new Func_json_search();
fFuncMap["json_set"] = new Func_json_insert(Func_json_insert::SET);
fFuncMap["json_type"] = new Func_json_type();
fFuncMap["json_unquote"] = new Func_json_unquote();
fFuncMap["json_valid"] = new Func_json_valid();
fFuncMap["json_value"] = new Func_json_value();
fFuncMap["last_day"] = new Func_last_day(); fFuncMap["last_day"] = new Func_last_day();
fFuncMap["lcase"] = new Func_lcase(); // dlh fFuncMap["lcase"] = new Func_lcase(); // dlh
fFuncMap["least"] = new Func_least(); // dlh fFuncMap["least"] = new Func_least(); // dlh

View File

@ -0,0 +1,679 @@
#pragma once
#include <string>
#define PREFER_MY_CONFIG_H
#include <mariadb.h>
#include <mysql.h>
#include <my_sys.h>
#include <json_lib.h>
#include "collation.h"
#include "functor_bool.h"
#include "functor_int.h"
#include "functor_str.h"
// Check if mariadb version >= 10.9
#if MYSQL_VERSION_ID >= 100900
#ifndef MYSQL_GE_1009
#define MYSQL_GE_1009
#endif
#endif
namespace funcexp
{
// Wrapper around json_path_t that also carries parsing-state flags
struct JSONPath
{
public:
JSONPath() : constant(false), parsed(false), currStep(nullptr)
{
}
json_path_t p;
bool constant;  // true if the path argument is a constant
bool parsed;    // true if the constant path has already been parsed
json_path_step_t* currStep;
};
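// json_engine_t wrapper that starts scanning the given string on construction.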
class JSONEgWrapper : public json_engine_t
{
public:
JSONEgWrapper(CHARSET_INFO* cs, const uchar* str, const uchar* end)
{
json_scan_start(this, cs, str, end);
}
JSONEgWrapper(const std::string& str, CHARSET_INFO* cs)
: JSONEgWrapper(cs, (const uchar*)str.data(), (const uchar*)str.data() + str.size())
{
}
bool checkAndGetScalar(std::string& ret, int* error);
bool checkAndGetComplexVal(std::string& ret, int* error);
};
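// Shared implementation for JSON_VALUE/JSON_QUERY: parses (and caches, when constant) the
// path argument and extracts the value it selects; subclasses decide via checkAndGetValue()
// whether a scalar or a complex value is acceptable.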
class JSONPathWrapper : public JSONPath
{
protected:
virtual ~JSONPathWrapper()
{
}
virtual bool checkAndGetValue(JSONEgWrapper* je, std::string& ret, int* error) = 0;
bool extract(std::string& ret, rowgroup::Row& row, execplan::SPTP& funcParmJS,
execplan::SPTP& funcParmPath);
};
/** @brief Func_json_valid class
*/
class Func_json_valid : public Func_Bool
{
public:
Func_json_valid() : Func_Bool("json_valid")
{
}
~Func_json_valid()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
bool getBoolVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_depth class
*/
class Func_json_depth : public Func_Int
{
public:
Func_json_depth() : Func_Int("json_depth")
{
}
virtual ~Func_json_depth()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
int64_t getIntVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_length class
*/
class Func_json_length : public Func_Int
{
protected:
JSONPath path;
public:
Func_json_length() : Func_Int("json_length")
{
}
virtual ~Func_json_length()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
int64_t getIntVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_equals class
*/
class Func_json_equals : public Func_Bool
{
public:
Func_json_equals() : Func_Bool("json_equals")
{
}
~Func_json_equals()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
bool getBoolVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_normalize class
*/
class Func_json_normalize : public Func_Str
{
public:
Func_json_normalize() : Func_Str("json_normalize")
{
}
virtual ~Func_json_normalize()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_type class
*/
class Func_json_type : public Func_Str
{
public:
Func_json_type() : Func_Str("json_type")
{
}
virtual ~Func_json_type()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_object class
*/
class Func_json_object : public Func_Str
{
public:
Func_json_object() : Func_Str("json_object")
{
}
virtual ~Func_json_object()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_array class
*/
class Func_json_array : public Func_Str
{
public:
Func_json_array() : Func_Str("json_array")
{
}
virtual ~Func_json_array()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_keys class
*/
class Func_json_keys : public Func_Str
{
protected:
JSONPath path;
public:
Func_json_keys() : Func_Str("json_keys")
{
}
virtual ~Func_json_keys()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_exists class
*/
class Func_json_exists : public Func_Bool
{
protected:
JSONPath path;
public:
Func_json_exists() : Func_Bool("json_exists")
{
}
~Func_json_exists()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
bool getBoolVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_quote class
*/
class Func_json_quote : public Func_Str
{
protected:
JSONPath path;
public:
Func_json_quote() : Func_Str("json_quote")
{
}
virtual ~Func_json_quote()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_unquote class
*/
class Func_json_unquote : public Func_Str
{
protected:
JSONPath path;
public:
Func_json_unquote() : Func_Str("json_unquote")
{
}
virtual ~Func_json_unquote()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_format class
*/
class Func_json_format : public Func_Str
{
public:
enum FORMATS
{
NONE,
COMPACT,
LOOSE,
DETAILED
};
protected:
FORMATS fmt;
public:
Func_json_format() : Func_Str("json_detailed"), fmt(DETAILED)
{
}
Func_json_format(FORMATS format) : fmt(format)
{
assert(format != NONE);
switch (format)
{
case DETAILED: Func_Str::Func::funcName("json_detailed"); break;
case LOOSE: Func_Str::Func::funcName("json_loose"); break;
case COMPACT: Func_Str::Func::funcName("json_compact"); break;
default: break;
}
}
virtual ~Func_json_format()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_merge_preserve class
*/
class Func_json_merge : public Func_Str
{
public:
Func_json_merge() : Func_Str("json_merge_preserve")
{
}
virtual ~Func_json_merge()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_merge_patch class
*/
class Func_json_merge_patch : public Func_Str
{
public:
Func_json_merge_patch() : Func_Str("json_merge_patch")
{
}
virtual ~Func_json_merge_patch()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_value class
*/
class Func_json_value : public Func_Str, public JSONPathWrapper
{
public:
Func_json_value() : Func_Str("json_value")
{
}
virtual ~Func_json_value()
{
}
bool checkAndGetValue(JSONEgWrapper* je, string& res, int* error) override
{
return je->checkAndGetScalar(res, error);
}
execplan::CalpontSystemCatalog::ColType operationType(
FunctionParm& fp, execplan::CalpontSystemCatalog::ColType& resultType) override;
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type) override;
};
/** @brief Func_json_query class
*/
class Func_json_query : public Func_Str, public JSONPathWrapper
{
public:
Func_json_query() : Func_Str("json_query")
{
}
virtual ~Func_json_query()
{
}
bool checkAndGetValue(JSONEgWrapper* je, string& res, int* error) override
{
return je->checkAndGetComplexVal(res, error);
}
execplan::CalpontSystemCatalog::ColType operationType(
FunctionParm& fp, execplan::CalpontSystemCatalog::ColType& resultType) override;
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type) override;
};
/** @brief Func_json_contains class
*/
class Func_json_contains : public Func_Bool
{
protected:
JSONPath path;
bool arg2Const;   // argument 2 is a constant
bool arg2Parsed;  // argument 2 has already been parsed
std::string_view arg2Val;
public:
Func_json_contains() : Func_Bool("json_contains"), arg2Const(false), arg2Parsed(false), arg2Val("")
{
}
virtual ~Func_json_contains()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
bool getBoolVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_array_append class
*/
class Func_json_array_append : public Func_Str
{
protected:
std::vector<JSONPath> paths;
public:
Func_json_array_append() : Func_Str("json_array_append")
{
}
virtual ~Func_json_array_append()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
private:
static const int padding = 8;
};
/** @brief Func_json_array_insert class
*/
class Func_json_array_insert : public Func_Str
{
protected:
std::vector<JSONPath> paths;
public:
Func_json_array_insert() : Func_Str("json_array_insert")
{
}
virtual ~Func_json_array_insert()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_insert class
*/
class Func_json_insert : public Func_Str
{
public:
enum MODE
{
NONE,
INSERT,
REPLACE,
SET
};
protected:
MODE mode;
std::vector<JSONPath> paths;
public:
Func_json_insert() : Func_Str("json_insert"), mode(INSERT)
{
}
Func_json_insert(MODE m) : mode(m)
{
assert(m != NONE);
switch (m)
{
case INSERT: Func_Str::Func::funcName("json_insert"); break;
case REPLACE: Func_Str::Func::funcName("json_replace"); break;
case SET: Func_Str::Func::funcName("json_set"); break;
default: break;
}
}
virtual ~Func_json_insert()
{
}
MODE getMode() const
{
return mode;
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_remove class
*/
class Func_json_remove : public Func_Str
{
protected:
std::vector<JSONPath> paths;
public:
Func_json_remove() : Func_Str("json_remove")
{
}
virtual ~Func_json_remove()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_contains_path class
*/
class Func_json_contains_path : public Func_Bool
{
protected:
std::vector<JSONPath> paths;
std::vector<bool> hasFound;
bool isModeOne;
bool isModeConst;
bool isModeParsed;
public:
Func_json_contains_path()
: Func_Bool("json_contains_path"), isModeOne(false), isModeConst(false), isModeParsed(false)
{
}
virtual ~Func_json_contains_path()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
bool getBoolVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_overlaps class
*/
class Func_json_overlaps : public Func_Bool
{
protected:
JSONPath path;
public:
Func_json_overlaps() : Func_Bool("json_overlaps")
{
}
virtual ~Func_json_overlaps()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
bool getBoolVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
};
/** @brief Func_json_search class
*/
class Func_json_search : public Func_Str
{
protected:
std::vector<JSONPath> paths;
bool isModeParsed;
bool isModeConst;
bool isModeOne;
int escape;
public:
Func_json_search()
: Func_Str("json_search"), isModeParsed(false), isModeConst(false), isModeOne(false), escape('\\')
{
}
virtual ~Func_json_search()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
private:
int cmpJSValWild(json_engine_t* jsEg, const string_view& cmpStr, const CHARSET_INFO* cs);
};
/** @brief Func_json_extract class
*/
class Func_json_extract : public Func_Str
{
protected:
std::vector<JSONPath> paths;
public:
Func_json_extract() : Func_Str("json_extract")
{
}
virtual ~Func_json_extract()
{
}
execplan::CalpontSystemCatalog::ColType operationType(FunctionParm& fp,
execplan::CalpontSystemCatalog::ColType& resultType);
std::string getStrVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
int64_t getIntVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
double getDoubleVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
execplan::IDB_Decimal getDecimalVal(rowgroup::Row& row, FunctionParm& fp, bool& isNull,
execplan::CalpontSystemCatalog::ColType& type);
private:
int doExtract(rowgroup::Row& row, FunctionParm& fp, json_value_types* type, std::string& retJS,
bool compareWhole);
};
} // namespace funcexp

View File

@ -0,0 +1,374 @@
#include "jsonhelpers.h"
using namespace std;
namespace funcexp
{
namespace helpers
{
int setupJSPath(json_path_t* path, CHARSET_INFO* cs, const string_view& str, bool wildcards = true)
{
int err = json_path_setup(path, cs, (const uchar*)str.data(), (const uchar*)str.data() + str.size());
if (wildcards)
return err;
if (!err)
{
#ifdef MYSQL_GE_1009
bool support = (path->types_used & (JSON_PATH_WILD | JSON_PATH_DOUBLE_WILD | JSON_PATH_ARRAY_RANGE)) == 0;
#else
bool support = (path->types_used & (JSON_PATH_WILD | JSON_PATH_DOUBLE_WILD)) == 0;
#endif
if (support)
return 0;
path->s.error = NO_WILDCARD_ALLOWED;
}
return 1;
}
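// JSON-escape js and append the escaped text to ret; returns true on failure,
// leaving error handling to the caller.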
bool appendEscapedJS(string& ret, const CHARSET_INFO* retCS, const string_view& js, const CHARSET_INFO* jsCS)
{
const int jsLen = js.size();
const char* rawJS = js.data();
int strLen = jsLen * 12 * jsCS->mbmaxlen / jsCS->mbminlen;
char* buf = (char*)alloca(strLen);
if ((strLen = json_escape(retCS, (const uchar*)rawJS, (const uchar*)rawJS + jsLen, jsCS, (uchar*)buf,
(uchar*)buf + strLen)) > 0)
{
buf[strLen] = '\0';
ret.append(buf, strLen);
return false;
}
return true;
}
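// Append the argument as a quoted, escaped object key followed by ": ";
// a NULL argument yields an empty key. Returns true on escaping failure.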
bool appendJSKeyName(string& ret, const CHARSET_INFO* retCS, rowgroup::Row& row, execplan::SPTP& parm)
{
bool nullVal = false;
const string_view js = parm->data()->getStrVal(row, nullVal);
if (nullVal)
{
ret.append("\"\": ");
return false;
}
ret.append("\"");
if (appendEscapedJS(ret, retCS, js, parm->data()->resultType().getCharset()))
return true;
ret.append("\": ");
return false;
}
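// Append the argument as a JSON value: SQL NULL becomes null, BIGINT true/false stays bare,
// character types are quoted and escaped, anything else is escaped but left unquoted.
// Returns true on failure.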
bool appendJSValue(string& ret, const CHARSET_INFO* retCS, rowgroup::Row& row, execplan::SPTP& parm)
{
bool nullVal = false;
const string_view js = parm->data()->getStrVal(row, nullVal);
if (nullVal)
{
ret.append("null");
return false;
}
datatypes::SystemCatalog::ColDataType dataType = parm->data()->resultType().colDataType;
if (dataType == datatypes::SystemCatalog::BIGINT && (js == "true" || js == "false"))
{
ret.append(js);
return false;
}
const CHARSET_INFO* jsCS = parm->data()->resultType().getCharset();
if (isCharType(dataType))
{
ret.append("\"");
if (appendEscapedJS(ret, retCS, js, jsCS))
return true;
ret.append("\"");
return false;
}
return appendEscapedJS(ret, retCS, js, jsCS);
}
int appendTab(string& js, const int depth, const int tabSize)
{
try
{
js.append("\n");
for (int i = 0; i < depth; i++)
{
js.append(tab_arr, tabSize);
}
}
catch (const std::exception& e)
{
return 1;
}
return 0;
}
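// Re-emit the JSON held by je into niceJS according to mode: COMPACT drops insignificant
// whitespace, LOOSE adds a space after ',' and ':', DETAILED pretty-prints with
// tabSize-space indentation. Returns non-zero on error.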
int doFormat(json_engine_t* je, string& niceJS, Func_json_format::FORMATS mode, int tabSize)
{
int depth = 0;
static const char *comma = ", ", *colon = "\": ";
uint commaLen, colonLen;
int firstValue = 1;
niceJS.reserve(je->s.str_end - je->s.c_str + 32);
assert(mode != Func_json_format::DETAILED || (tabSize >= 0 && tabSize <= TAB_SIZE_LIMIT));
if (mode == Func_json_format::LOOSE)
{
commaLen = 2;
colonLen = 3;
}
else if (mode == Func_json_format::DETAILED)
{
commaLen = 1;
colonLen = 3;
}
else
{
commaLen = 1;
colonLen = 2;
}
do
{
switch (je->state)
{
case JST_KEY:
{
const uchar* key_start = je->s.c_str;
const uchar* key_end;
do
{
key_end = je->s.c_str;
} while (json_read_keyname_chr(je) == 0);
if (unlikely(je->s.error))
goto error;
if (!firstValue)
niceJS.append(comma, commaLen);
if (mode == Func_json_format::DETAILED && appendTab(niceJS, depth, tabSize))
goto error;
niceJS.append("\"");
niceJS.append((const char*)key_start, (int)(key_end - key_start));
niceJS.append(colon, colonLen);
}
/* now we have key value to handle, so no 'break'. */
DBUG_ASSERT(je->state == JST_VALUE);
goto handle_value;
case JST_VALUE:
if (!firstValue)
niceJS.append(comma, commaLen);
if (mode == Func_json_format::DETAILED && depth > 0 && appendTab(niceJS, depth, tabSize))
goto error;
handle_value:
if (json_read_value(je))
goto error;
if (json_value_scalar(je))
{
niceJS.append((const char*)je->value_begin, (int)(je->value_end - je->value_begin));
firstValue = 0;
}
else
{
if (mode == Func_json_format::DETAILED && depth > 0 && appendTab(niceJS, depth, tabSize))
goto error;
niceJS.append((je->value_type == JSON_VALUE_OBJECT) ? "{" : "[");
firstValue = 1;
depth++;
}
break;
case JST_OBJ_END:
case JST_ARRAY_END:
depth--;
if (mode == Func_json_format::DETAILED && appendTab(niceJS, depth, tabSize))
goto error;
niceJS.append((je->state == JST_OBJ_END) ? "}" : "]");
firstValue = 0;
break;
default: break;
};
} while (json_scan_next(je) == 0);
return je->s.error || *je->killed_ptr;
error:
return 1;
}
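// Search the current JSON object level for the given key; returns true if found,
// false once the end of the object is reached.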
bool findKeyInObject(json_engine_t* jsEg, json_string_t* key)
{
const uchar* str = key->c_str;
while (json_scan_next(jsEg) == 0 && jsEg->state != JST_OBJ_END)
{
DBUG_ASSERT(jsEg->state == JST_KEY);
if (json_key_matches(jsEg, key))
return true;
if (json_skip_key(jsEg))
return false;
key->c_str = str;
}
return false;
}
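// Recursively compare the (possibly wildcarded) argument path steps a..aEnd against the
// concrete path steps b..bEnd; '**' steps are handled by recursion, and arraySize (when
// given, 10.9+) resolves negative array indexes. A negative result means the paths do not match.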
int cmpPartJSPath(const json_path_step_t* a, const json_path_step_t* aEnd, const json_path_step_t* b,
const json_path_step_t* bEnd, enum json_value_types vt, const int* arraySize)
{
int ret, ret2;
const json_path_step_t* tmpB = b;
while (a <= aEnd)
{
if (b > bEnd)
{
while (vt != JSON_VALUE_ARRAY && (a->type & JSON_PATH_ARRAY_WILD) == JSON_PATH_ARRAY && a->n_item == 0)
{
if (++a > aEnd)
return 0;
}
return -2;
}
DBUG_ASSERT((b->type & (JSON_PATH_WILD | JSON_PATH_DOUBLE_WILD)) == 0);
if (a->type & JSON_PATH_ARRAY)
{
if (b->type & JSON_PATH_ARRAY)
{
#ifdef MYSQL_GE_1009
int ret = 0, corrected_n_item_a = 0;
if (arraySize)
corrected_n_item_a = a->n_item < 0 ? arraySize[b - tmpB] + a->n_item : a->n_item;
if (a->type & JSON_PATH_ARRAY_RANGE)
{
int corrected_n_item_end_a = 0;
if (arraySize)
corrected_n_item_end_a = a->n_item_end < 0 ? arraySize[b - tmpB] + a->n_item_end : a->n_item_end;
ret = b->n_item >= corrected_n_item_a && b->n_item <= corrected_n_item_end_a;
}
else
ret = corrected_n_item_a == b->n_item;
if ((a->type & JSON_PATH_WILD) || ret)
goto step_fits;
goto step_failed;
#else
if ((a->type & JSON_PATH_WILD) || a->n_item == b->n_item)
goto step_fits;
goto step_failed;
#endif
}
if ((a->type & JSON_PATH_WILD) == 0 && a->n_item == 0)
goto step_fits_autowrap;
goto step_failed;
}
else /* JSON_PATH_KEY */
{
if (!(b->type & JSON_PATH_KEY))
goto step_failed;
if (!(a->type & JSON_PATH_WILD) &&
(a->key_end - a->key != b->key_end - b->key || memcmp(a->key, b->key, a->key_end - a->key) != 0))
goto step_failed;
goto step_fits;
}
step_failed:
if (!(a->type & JSON_PATH_DOUBLE_WILD))
return -1;
b++;
continue;
step_fits:
b++;
if (!(a->type & JSON_PATH_DOUBLE_WILD))
{
a++;
continue;
}
/* Double-wildcard handling needs recursion. */
ret = cmpPartJSPath(a + 1, aEnd, b, bEnd, vt, arraySize ? arraySize + (b - tmpB) : NULL);
if (ret == 0)
return 0;
ret2 = cmpPartJSPath(a, aEnd, b, bEnd, vt, arraySize ? arraySize + (b - tmpB) : NULL);
return (ret2 >= 0) ? ret2 : ret;
step_fits_autowrap:
if (!(a->type & JSON_PATH_DOUBLE_WILD))
{
a++;
continue;
}
/* Double-wildcard handling needs recursion. */
ret = cmpPartJSPath(a + 1, aEnd, b + 1, bEnd, vt, arraySize ? arraySize + (b - tmpB) : NULL);
if (ret == 0)
return 0;
ret2 = cmpPartJSPath(a, aEnd, b + 1, bEnd, vt, arraySize ? arraySize + (b - tmpB) : NULL);
return (ret2 >= 0) ? ret2 : ret;
}
return b <= bEnd;
}
int cmpJSPath(const json_path_t* a, const json_path_t* b, enum json_value_types vt, const int* arraySize)
{
return cmpPartJSPath(a->steps + 1, a->last_step, b->steps + 1, b->last_step, vt, arraySize);
}
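// Parse the path argument into 'path'; constant paths are parsed only once and then reused
// for subsequent rows. Returns non-zero if the argument is NULL or the path is invalid.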
int parseJSPath(JSONPath& path, rowgroup::Row& row, execplan::SPTP& parm, bool wildcards)
{
// detect (once) whether the path argument is a constant column
if (!path.constant)
markConstFlag(path, parm);
bool isNull = false;
const string_view jsp = parm->data()->getStrVal(row, isNull);
if (isNull || setupJSPath(&path.p, getCharset(parm), jsp, wildcards))
return 1;
path.parsed = path.constant;
return 0;
}
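// Returns true as soon as one of the already-parsed argument paths compares successfully
// against the path p just scanned from the document.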
bool matchJSPath(const vector<funcexp::JSONPath>& paths, const json_path_t* p, json_value_types valType,
const int* arrayCounter, bool exact)
{
for (size_t curr = 0; curr < paths.size(); curr++)
{
#ifdef MYSQL_GE_1009
int cmp = cmpJSPath(&paths[curr].p, p, valType, arrayCounter);
#else
int cmp = cmpJSPath(&paths[curr].p, p, valType);
#endif
bool ret = exact ? cmp >= 0 : cmp == 0;
if (ret)
return true;
}
return false;
}
} // namespace helpers
} // namespace funcexp

107 utils/funcexp/jsonhelpers.h Normal file
View File

@ -0,0 +1,107 @@
#pragma once
#include <cstddef>
#include <string>
#include <string_view>
#include <vector>
#define PREFER_MY_CONFIG_H
#include <mariadb.h>
#include <mysql.h>
#include <my_sys.h>
#include <json_lib.h>
#include "collation.h"
#include "functor_json.h"
#include "functor_str.h"
#include "collation.h"
#include "rowgroup.h"
#include "treenode.h"
#include "functioncolumn.h"
#include "constantcolumn.h"
namespace funcexp
{
namespace helpers
{
static const int NO_WILDCARD_ALLOWED = 1;
/*
Checks if the path has '.*' '[*]' or '**' constructions
and sets the NO_WILDCARD_ALLOWED error if so.
*/
int setupJSPath(json_path_t* path, CHARSET_INFO* cs, const string_view& str, bool wildcards);
// Returns true if an error occurs; the caller is expected to handle it
bool appendEscapedJS(string& ret, const CHARSET_INFO* retCS, const string_view& js, const CHARSET_INFO* jsCS);
bool appendJSKeyName(string& ret, const CHARSET_INFO* retCS, rowgroup::Row& row, execplan::SPTP& parm);
bool appendJSValue(string& ret, const CHARSET_INFO* retCS, rowgroup::Row& row, execplan::SPTP& parm);
static const int TAB_SIZE_LIMIT = 8;
static const char tab_arr[TAB_SIZE_LIMIT + 1] = " ";
// Format the JSON according to the given format mode
int doFormat(json_engine_t* je, string& niceJS, Func_json_format::FORMATS mode, int tabSize = 4);
static const int SHOULD_END_WITH_ARRAY = 2;
static const int TRIVIAL_PATH_NOT_ALLOWED = 3;
bool findKeyInObject(json_engine_t* jsEg, json_string_t* key);
#ifdef MYSQL_GE_1009
using IntType = int;
#else
using IntType = uint;
#endif
/*
Keeps the array_counters type compatible with json_lib's json_find_path:
before 10.9 it takes uint* array_counters,
from 10.9 onwards int* array_counters.
*/
inline static int locateJSPath(json_engine_t& jsEg, JSONPath& path, int* jsErr = nullptr)
{
IntType arrayCounters[JSON_DEPTH_LIMIT];
path.currStep = path.p.steps;
if (json_find_path(&jsEg, &path.p, &path.currStep, arrayCounters))
{
if (jsErr && jsEg.s.error)
*jsErr = 1;
return 1;
}
return 0;
}
// Check and set the constant flag from function parameters
inline static void markConstFlag(JSONPath& path, const execplan::SPTP& parm)
{
path.constant = (dynamic_cast<execplan::ConstantColumn*>(parm->data()) != nullptr);
}
int cmpJSPath(const json_path_t* a, const json_path_t* b, enum json_value_types vt,
const int* arraySize = nullptr);
inline const CHARSET_INFO* getCharset(execplan::SPTP& parm)
{
return parm->data()->resultType().getCharset();
}
inline void initJSEngine(json_engine_t& jsEg, const CHARSET_INFO* jsCS, const string_view& js)
{
json_scan_start(&jsEg, jsCS, (const uchar*)js.data(), (const uchar*)js.data() + js.size());
}
int parseJSPath(JSONPath& path, rowgroup::Row& row, execplan::SPTP& parm, bool wildcards = true);
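// Allocate one JSONPath slot per path argument on first use; path arguments start at
// index 'start' and recur every 'step' parameters.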
inline void initJSPaths(vector<JSONPath>& paths, FunctionParm& fp, const int start, const int step)
{
if (paths.size() == 0)
for (size_t i = start; i < fp.size(); i += step)
paths.push_back(JSONPath{});
}
bool matchJSPath(const vector<funcexp::JSONPath>& paths, const json_path_t* p, json_value_types valType,
const int* arrayCounter = nullptr, bool exact = true);
} // namespace helpers
} // namespace funcexp