diff --git a/datatypes/mcs_datatype.cpp b/datatypes/mcs_datatype.cpp
index 03b5f8e07..7045ab44e 100644
--- a/datatypes/mcs_datatype.cpp
+++ b/datatypes/mcs_datatype.cpp
@@ -428,6 +428,7 @@ const TypeHandler* TypeHandler::find_by_ddltype(const ddlpackage::ColumnType& ct
     case ddlpackage::DDL_CLOB: return &mcs_type_handler_clob;
     case ddlpackage::DDL_BLOB: return &mcs_type_handler_blob;
     case ddlpackage::DDL_TEXT: return &mcs_type_handler_text;
+    case ddlpackage::DDL_JSON: return &mcs_type_handler_text;
 
     case ddlpackage::DDL_UNSIGNED_TINYINT: return &mcs_type_handler_uint8;
     case ddlpackage::DDL_UNSIGNED_SMALLINT: return &mcs_type_handler_uint16;
diff --git a/dbcon/ddlpackage/ddl.l b/dbcon/ddlpackage/ddl.l
index 5404542f0..24b7f586a 100644
--- a/dbcon/ddlpackage/ddl.l
+++ b/dbcon/ddlpackage/ddl.l
@@ -134,6 +134,7 @@ INDEX {return INDEX;}
 INITIALLY {return INITIALLY;}
 INT {return IDB_INT;}
 INTEGER {return INTEGER;}
+JSON { return JSON; }
 KEY {return KEY;}
 MATCH {return MATCH;}
 MAX_ROWS {return MAX_ROWS;}
diff --git a/dbcon/ddlpackage/ddl.y b/dbcon/ddlpackage/ddl.y
index 46d31bad8..16e09b1be 100644
--- a/dbcon/ddlpackage/ddl.y
+++ b/dbcon/ddlpackage/ddl.y
@@ -56,7 +56,7 @@ int ddllex(YYSTYPE* ddllval, void* yyscanner);
 void ddlerror(struct pass_to_bison* x, char const *s);
 char* copy_string(const char *str);
 
-void fix_column_length_and_charset(SchemaObject* elem, const CHARSET_INFO* def_cs, myf utf8_flag)
+void postprocess_column_information(SchemaObject* elem, const CHARSET_INFO* def_cs, myf utf8_flag)
 {
   auto* column = dynamic_cast<ColumnDef*>(elem);
 
@@ -104,6 +104,17 @@ void fix_column_length_and_charset(SchemaObject* elem, const CHARSET_INFO* def_c
     else
       column->fType->fLength = 16777215;
   }
+  if (column->fType->fType == DDL_JSON)
+  {
+    CHARSET_INFO* cs = &my_charset_utf8mb4_bin;
+
+    column->fType->fCharset = cs->cs_name.str;
+    column->fType->fCollate = cs->coll_name.str;
+    column->fType->fCharsetNum = cs->number;
+
+    column->fType->fLength = 16777215;
+    column->fConstraints.push_back(new ColumnConstraintDef(DDL_VALIDATE_JSON));
+  }
 }
 
 %}
@@ -155,7 +166,7 @@
 CHARACTER CHECK CLOB COLUMN BOOL BOOLEAN COLUMNS COMMENT CONSTRAINT CONSTRAINTS
 CREATE CURRENT_USER DATETIME DEC DECIMAL DEFAULT DEFERRABLE DEFERRED IDB_DELETE DROP ENGINE
-FOREIGN FULL IMMEDIATE INDEX INITIALLY IDB_INT INTEGER KEY LONGBLOB LONGTEXT
+FOREIGN FULL IMMEDIATE INDEX INITIALLY IDB_INT INTEGER JSON KEY LONGBLOB LONGTEXT
 MATCH MAX_ROWS MEDIUMBLOB MEDIUMTEXT MEDIUMINT MIN_ROWS MODIFY NO NOT NULL_TOK NUMBER
 NUMERIC ON PARTIAL PRECISION PRIMARY REFERENCES RENAME RESTRICT SET SMALLINT TABLE TEXT
 TINYBLOB TINYTEXT
 
@@ -355,7 +366,7 @@ create_table_statement:
   {
     for (auto* elem : *$6)
     {
-      fix_column_length_and_charset(elem, x->default_table_charset, x->utf8_flag);
+      postprocess_column_information(elem, x->default_table_charset, x->utf8_flag);
     }
     $$ = new CreateTableStatement(new TableDef($4, $6, $8));
   }
@@ -719,17 +730,17 @@ ata_add_column:
   /* See the documentation for SchemaObject for an explanation of why we are using
    * dynamic_cast here.
   */
-  ADD column_def { fix_column_length_and_charset($2, x->default_table_charset, x->utf8_flag); $$ = new AtaAddColumn(dynamic_cast<ColumnDef*>($2));}
-  | ADD COLUMN column_def { fix_column_length_and_charset($3, x->default_table_charset, x->utf8_flag); $$ = new AtaAddColumn(dynamic_cast<ColumnDef*>($3));}
+  ADD column_def { postprocess_column_information($2, x->default_table_charset, x->utf8_flag); $$ = new AtaAddColumn(dynamic_cast<ColumnDef*>($2));}
+  | ADD COLUMN column_def { postprocess_column_information($3, x->default_table_charset, x->utf8_flag); $$ = new AtaAddColumn(dynamic_cast<ColumnDef*>($3));}
   | ADD '(' table_element_list ')' {
     for (auto* elem : *$3) {
-      fix_column_length_and_charset(elem, x->default_table_charset, x->utf8_flag);
+      postprocess_column_information(elem, x->default_table_charset, x->utf8_flag);
     }
     $$ = new AtaAddColumns($3);
   }
   | ADD COLUMN '(' table_element_list ')' {
     for (auto* elem : *$4) {
-      fix_column_length_and_charset(elem, x->default_table_charset, x->utf8_flag);
+      postprocess_column_information(elem, x->default_table_charset, x->utf8_flag);
     }
     $$ = new AtaAddColumns($4);
   }
@@ -1067,6 +1078,11 @@ text_type:
     $$ = new ColumnType(DDL_TEXT);
     $$->fLength = 16777215;
   }
+  | JSON
+  {
+    $$ = new ColumnType(DDL_JSON);
+    $$->fLength = 16777215;
+  }
   ;
 
 numeric_type:
diff --git a/dbcon/ddlpackage/ddlpkg.h b/dbcon/ddlpackage/ddlpkg.h
index be98572f6..e6ec6bb4c 100644
--- a/dbcon/ddlpackage/ddlpkg.h
+++ b/dbcon/ddlpackage/ddlpkg.h
@@ -160,6 +160,7 @@ enum DDL_CONSTRAINTS
   DDL_REFERENCES,
   DDL_NOT_NULL,
   DDL_AUTO_INCREMENT,
+  DDL_VALIDATE_JSON,
   DDL_INVALID_CONSTRAINT
 };
 /** @brief
@@ -170,7 +171,8 @@ const std::string ConstraintString[] = {"primary",
                                         "unique",
                                         "references",
                                         "not_null",
-                                        "auto_increment"
+                                        "auto_increment",
+                                        "validate_json",
                                         ""};
 /** @brief
   Datatype List
@@ -210,6 +212,7 @@ enum DDL_DATATYPES
   DDL_TEXT,
   DDL_TIME,
   DDL_TIMESTAMP,
+  DDL_JSON,
   DDL_INVALID_DATATYPE
 };
 
@@ -956,6 +959,7 @@ struct ColumnType
 
   /** @brief Is the TEXT column has explicit defined length, ie TEXT(1717) */
   bool fExplicitLength;
+
 };
 
 /** @brief A column constraint definition.
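For context, the grammar-level handling added above amounts to this: a JSON column is parsed like a TEXT column, and postprocess_column_information() then pins it to the utf8mb4_bin charset/collation, gives it the same 16777215-byte length used for TEXT, and attaches a DDL_VALIDATE_JSON constraint. A minimal standalone sketch of that JSON branch follows; ColumnDef, ColumnType, ColumnConstraintDef and the hard-coded charset values below are simplified stand-ins for illustration, not the real ddlpackage or MariaDB headers, which copy the values from my_charset_utf8mb4_bin instead.

// Minimal sketch (stand-in types, not the real ddlpackage headers) of the
// JSON branch added to postprocess_column_information() in ddl.y.
#include <string>
#include <vector>

enum DDL_DATATYPES { DDL_TEXT, DDL_JSON };
enum DDL_CONSTRAINTS { DDL_NOT_NULL, DDL_VALIDATE_JSON };

struct ColumnType
{
  DDL_DATATYPES fType = DDL_TEXT;
  std::string fCharset;
  std::string fCollate;
  unsigned fCharsetNum = 0;
  long fLength = 0;
};

struct ColumnConstraintDef
{
  explicit ColumnConstraintDef(DDL_CONSTRAINTS t) : fConstraintType(t) {}
  DDL_CONSTRAINTS fConstraintType;
};

struct ColumnDef
{
  ColumnType fType;
  std::vector<ColumnConstraintDef> fConstraints;
};

// A JSON column is stored exactly like a 16 MB TEXT column pinned to
// utf8mb4_bin, plus a marker constraint the DDL layers are taught to accept.
void postprocess_json_column(ColumnDef& column)
{
  if (column.fType.fType != DDL_JSON)
    return;

  column.fType.fCharset = "utf8mb4";      // real code copies cs->cs_name.str
  column.fType.fCollate = "utf8mb4_bin";  // real code copies cs->coll_name.str
  column.fType.fCharsetNum = 46;          // utf8mb4_bin's number; real code uses cs->number
  column.fType.fLength = 16777215;        // same length the grammar assigns to TEXT
  column.fConstraints.emplace_back(DDL_VALIDATE_JSON);
}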
diff --git a/dbcon/ddlpackage/serialize.cpp b/dbcon/ddlpackage/serialize.cpp
index 6dff88362..622ee91c1 100644
--- a/dbcon/ddlpackage/serialize.cpp
+++ b/dbcon/ddlpackage/serialize.cpp
@@ -1167,6 +1167,7 @@ int ColumnType::serialize(ByteStream& bytestream)
   messageqcpp::ByteStream::octbyte nextVal = fNextvalue;
   messageqcpp::ByteStream::quadbyte charsetNum = fCharsetNum;
 
+  // write column types
   bytestream << ftype;
   bytestream << length;
diff --git a/dbcon/ddlpackageproc/ddlpackageprocessor.cpp b/dbcon/ddlpackageproc/ddlpackageprocessor.cpp
index 7dc815de0..ef37fa3fa 100644
--- a/dbcon/ddlpackageproc/ddlpackageprocessor.cpp
+++ b/dbcon/ddlpackageproc/ddlpackageprocessor.cpp
@@ -195,9 +195,10 @@ execplan::CalpontSystemCatalog::ColDataType DDLPackageProcessor::convertDataType
     case ddlpackage::DDL_BLOB: colDataType = CalpontSystemCatalog::BLOB; break;
 
-    case ddlpackage::DDL_TEXT: colDataType = CalpontSystemCatalog::TEXT; break;
+    case ddlpackage::DDL_TEXT:
+    case ddlpackage::DDL_JSON: colDataType = CalpontSystemCatalog::TEXT; break;
 
-    default: throw runtime_error("Unsupported datatype!");
+    default: throw runtime_error("Unsupported DDL datatype!");
   }
 
   return colDataType;
@@ -228,6 +229,8 @@ std::string DDLPackageProcessor::buildTableConstraintName(const int oid, ddlpack
     case ddlpackage::DDL_NOT_NULL: prefix = "nk_"; break;
 
+    case ddlpackage::DDL_VALIDATE_JSON: prefix = "jk_"; break;
+
     default: throw runtime_error("Unsupported constraint type!"); break;
   }
@@ -261,6 +264,8 @@ std::string DDLPackageProcessor::buildColumnConstraintName(const std::string& sc
     case ddlpackage::DDL_NOT_NULL: prefix = "nk_"; break;
 
+    case ddlpackage::DDL_VALIDATE_JSON: prefix = "jk_"; break;
+
     default: throw runtime_error("Unsupported constraint type!"); break;
   }
@@ -288,6 +293,8 @@ char DDLPackageProcessor::getConstraintCode(ddlpackage::DDL_CONSTRAINTS type)
     case ddlpackage::DDL_NOT_NULL: constraint_type = 'n'; break;
 
+    case ddlpackage::DDL_VALIDATE_JSON: constraint_type = 'j'; break;
+
     default: constraint_type = '0'; break;
   }
diff --git a/dbcon/mysql/ha_mcs_ddl.cpp b/dbcon/mysql/ha_mcs_ddl.cpp
index 703fee1e5..94bbd596e 100644
--- a/dbcon/mysql/ha_mcs_ddl.cpp
+++ b/dbcon/mysql/ha_mcs_ddl.cpp
@@ -125,7 +125,7 @@ CalpontSystemCatalog::ColDataType convertDataType(const ddlpackage::ColumnType&
   const datatypes::TypeHandler* h = datatypes::TypeHandler::find_by_ddltype(ct);
   if (!h)
   {
-    throw runtime_error("Unsupported datatype!");
+    throw runtime_error("Unsupported datatype to convert from!");
     return CalpontSystemCatalog::UNDEFINED;
   }
   return h->code();
@@ -822,10 +822,11 @@ int ProcessDDLStatement(string& ddlStatement, string& schema, const string& /*ta
 
         if (createTable->fTableDef->fColumns[i]->fConstraints.size() > 0)
         {
-          // support default value and NOT NULL constraint
+          // support default value, JSON validation and NOT NULL constraint
           for (uint32_t j = 0; j < createTable->fTableDef->fColumns[i]->fConstraints.size(); j++)
           {
-            if (createTable->fTableDef->fColumns[i]->fConstraints[j]->fConstraintType != DDL_NOT_NULL)
+            auto ctype = createTable->fTableDef->fColumns[i]->fConstraints[j]->fConstraintType;
+            if (ctype != DDL_NOT_NULL && ctype != DDL_VALIDATE_JSON)
             {
               rc = 1;
               thd->get_stmt_da()->set_overwrite_status(true);
@@ -1226,7 +1227,8 @@ int ProcessDDLStatement(string& ddlStatement, string& schema, const string& /*ta
           // support default value and NOT NULL constraint
           for (uint32_t j = 0; j < addColumnPtr->fColumnDef->fConstraints.size(); j++)
           {
-            if (addColumnPtr->fColumnDef->fConstraints[j]->fConstraintType != DDL_NOT_NULL)
+            auto ctype = addColumnPtr->fColumnDef->fConstraints[j]->fConstraintType;
+            if (ctype != DDL_NOT_NULL && ctype != DDL_VALIDATE_JSON)
             {
               rc = 1;
               thd->get_stmt_da()->set_overwrite_status(true);
@@ -1359,6 +1361,7 @@ int ProcessDDLStatement(string& ddlStatement, string& schema, const string& /*ta
         }
 
         // For TIMESTAMP, if no constraint is given, default to NOT NULL
+        // XXX: see the same code conditionally enabled for a specific MariaDB version.
         if (addColumnPtr->fColumnDef->fType->fType == ddlpackage::DDL_TIMESTAMP &&
             addColumnPtr->fColumnDef->fConstraints.empty())
         {
@@ -1611,7 +1614,8 @@ int ProcessDDLStatement(string& ddlStatement, string& schema, const string& /*ta
          //@Bug 5274. support default value and NOT NULL constraint
          for (uint32_t j = 0; j < addColumnsPtr->fColumns[0]->fConstraints.size(); j++)
          {
-            if (addColumnsPtr->fColumns[0]->fConstraints[j]->fConstraintType != DDL_NOT_NULL)
+            auto ctype = addColumnsPtr->fColumns[0]->fConstraints[j]->fConstraintType;
+            if (ctype != DDL_NOT_NULL && ctype != DDL_VALIDATE_JSON)
             {
               rc = 1;
               thd->get_stmt_da()->set_overwrite_status(true);
@@ -1744,6 +1748,7 @@ int ProcessDDLStatement(string& ddlStatement, string& schema, const string& /*ta
         }
 
         // For TIMESTAMP, if no constraint is given, default to NOT NULL
+        // XXX: see the similar code that is conditionally enabled for a specific MariaDB version.
         if (addColumnsPtr->fColumns[0]->fType->fType == ddlpackage::DDL_TIMESTAMP &&
             addColumnsPtr->fColumns[0]->fConstraints.empty())
         {
@@ -2073,6 +2078,16 @@ int ProcessDDLStatement(string& ddlStatement, string& schema, const string& /*ta
             return rc;
           }
         }
+        else if (renameColumnsPtr->fConstraints[j]->fConstraintType == DDL_VALIDATE_JSON)
+        {
+          rc = 1;
+          thd->get_stmt_da()->set_overwrite_status(true);
+          thd->raise_error_printf(ER_CHECK_NOT_IMPLEMENTED,
+                                  (IDBErrorInfo::instance()->errorMsg(ERR_CONSTRAINTS)).c_str());
+          ci->alterTableState = cal_connection_info::NOT_ALTER;
+          ci->isAlter = false;
+          return rc;
+        }
         else
         {
           rc = 1;
diff --git a/mysql-test/columnstore/basic/r/JSON-type.result b/mysql-test/columnstore/basic/r/JSON-type.result
new file mode 100644
index 000000000..faee378b3
--- /dev/null
+++ b/mysql-test/columnstore/basic/r/JSON-type.result
@@ -0,0 +1,16 @@
+DROP DATABASE IF EXISTS JSON_type;
+CREATE DATABASE JSON_type;
+USE JSON_type;
+CREATE TABLE tj(j JSON) ENGINE=COLUMNSTORE;
+INSERT INTO tj(j) VALUES ('()');
+ERROR 23000: CONSTRAINT `tj.j` failed for `JSON_type`.`tj`
+INSERT INTO tj(j) VALUES ('[]'), ('{}'), ('"a"');
+SELECT * FROM tj WHERE j = '"A"';
+j
+SELECT * FROM tj WHERE j = '"a"';
+j
+"a"
+INSERT INTO tj(j) VALUES ('{"a":"b", "b":"a"}');
+SELECT * FROM tj WHERE j = '{"b":"a","a":"b"}';
+j
+DROP DATABASE JSON_type;
diff --git a/mysql-test/columnstore/basic/r/mcs59_nonscalar_datatypes.result b/mysql-test/columnstore/basic/r/mcs59_nonscalar_datatypes.result
index d98562ac2..9cc0502bb 100644
--- a/mysql-test/columnstore/basic/r/mcs59_nonscalar_datatypes.result
+++ b/mysql-test/columnstore/basic/r/mcs59_nonscalar_datatypes.result
@@ -5,6 +5,4 @@ CREATE TABLE t_enum(col ENUM('min','max','avg'))ENGINE=Columnstore;
 ERROR 42000: The storage engine for the table doesn't support The syntax or the data type(s) is not supported by Columnstore. Please check the Columnstore syntax guide for supported syntax or data types.
 CREATE TABLE t_set(col SET('x','y','z'))ENGINE=Columnstore;
 ERROR 42000: The storage engine for the table doesn't support The syntax or the data type(s) is not supported by Columnstore. Please check the Columnstore syntax guide for supported syntax or data types.
-CREATE TABLE t_json(col JSON)ENGINE=Columnstore;
-ERROR 42000: The storage engine for the table doesn't support The syntax or the data type(s) is not supported by Columnstore. Please check the Columnstore syntax guide for supported syntax or data types.
 DROP DATABASE mcs59_db;
diff --git a/mysql-test/columnstore/basic/r/mcs9_create_table_negative.result b/mysql-test/columnstore/basic/r/mcs9_create_table_negative.result
index ee1fe7a3e..dabfabb8e 100644
--- a/mysql-test/columnstore/basic/r/mcs9_create_table_negative.result
+++ b/mysql-test/columnstore/basic/r/mcs9_create_table_negative.result
@@ -7,8 +7,6 @@ CREATE TABLE t2(t2_binary BINARY(3))ENGINE=Columnstore;
 ERROR 42000: The storage engine for the table doesn't support The syntax or the data type(s) is not supported by Columnstore. Please check the Columnstore syntax guide for supported syntax or data types.
 CREATE TABLE t3(t3_set SET('a','b'))ENGINE=Columnstore;
 ERROR 42000: The storage engine for the table doesn't support The syntax or the data type(s) is not supported by Columnstore. Please check the Columnstore syntax guide for supported syntax or data types.
-CREATE TABLE t4(t4_json JSON)ENGINE=Columnstore;
-ERROR 42000: The storage engine for the table doesn't support The syntax or the data type(s) is not supported by Columnstore. Please check the Columnstore syntax guide for supported syntax or data types.
 CREATE TABLE $table(col1 INT)ENGINE=columnstore;
 ERROR 42000: The storage engine for the table doesn't support The syntax or the data type(s) is not supported by Columnstore. Please check the Columnstore syntax guide for supported syntax or data types.
 CREATE TABLE abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklm(col1 VARCHAR(90))ENGINE=Columnstore;
diff --git a/mysql-test/columnstore/basic/t/JSON-type.test b/mysql-test/columnstore/basic/t/JSON-type.test
new file mode 100644
index 000000000..2b96897c7
--- /dev/null
+++ b/mysql-test/columnstore/basic/t/JSON-type.test
@@ -0,0 +1,17 @@
+--disable_warnings
+DROP DATABASE IF EXISTS JSON_type;
+--enable_warnings
+CREATE DATABASE JSON_type;
+USE JSON_type;
+CREATE TABLE tj(j JSON) ENGINE=COLUMNSTORE;
+--error 4025
+INSERT INTO tj(j) VALUES ('()'); # invalid
+INSERT INTO tj(j) VALUES ('[]'), ('{}'), ('"a"'); # valid
+SELECT * FROM tj WHERE j = '"A"'; # empty set.
+SELECT * FROM tj WHERE j = '"a"'; # single row.
+INSERT INTO tj(j) VALUES ('{"a":"b", "b":"a"}');
+SELECT * FROM tj WHERE j = '{"b":"a","a":"b"}'; # empty set, comparison is not structural.
+# UPDATE is not tested because it does not work.
+#UPDATE tj SET j = CONCAT(j,'()');
+DROP DATABASE JSON_type;
+
diff --git a/mysql-test/columnstore/basic/t/mcs59_nonscalar_datatypes.test b/mysql-test/columnstore/basic/t/mcs59_nonscalar_datatypes.test
index 8bb528edd..0c086a44d 100644
--- a/mysql-test/columnstore/basic/t/mcs59_nonscalar_datatypes.test
+++ b/mysql-test/columnstore/basic/t/mcs59_nonscalar_datatypes.test
@@ -15,8 +15,6 @@ USE mcs59_db;
 CREATE TABLE t_enum(col ENUM('min','max','avg'))ENGINE=Columnstore;
 --error ER_CHECK_NOT_IMPLEMENTED
 CREATE TABLE t_set(col SET('x','y','z'))ENGINE=Columnstore;
---error ER_CHECK_NOT_IMPLEMENTED
-CREATE TABLE t_json(col JSON)ENGINE=Columnstore;
 
 # Clean UP
 DROP DATABASE mcs59_db;
diff --git a/mysql-test/columnstore/basic/t/mcs9_create_table_negative.test b/mysql-test/columnstore/basic/t/mcs9_create_table_negative.test
index 0717c3861..76537ed09 100644
--- a/mysql-test/columnstore/basic/t/mcs9_create_table_negative.test
+++ b/mysql-test/columnstore/basic/t/mcs9_create_table_negative.test
@@ -17,8 +17,6 @@ CREATE TABLE t1(t1_enum ENUM('one','two','three'))ENGINE=Columnstore;
 CREATE TABLE t2(t2_binary BINARY(3))ENGINE=Columnstore;
 --error 1178
 CREATE TABLE t3(t3_set SET('a','b'))ENGINE=Columnstore;
---error 1178
-CREATE TABLE t4(t4_json JSON)ENGINE=Columnstore;
 
 # with unsupported table name
 --error 1178
diff --git a/writeengine/server/we_ddlcommon.h b/writeengine/server/we_ddlcommon.h
index c3091dab3..82829ba38 100644
--- a/writeengine/server/we_ddlcommon.h
+++ b/writeengine/server/we_ddlcommon.h
@@ -178,7 +178,8 @@ inline int convertDataType(int dataType)
     case ddlpackage::DDL_BLOB: calpontDataType = execplan::CalpontSystemCatalog::BLOB; break;
 
-    case ddlpackage::DDL_TEXT: calpontDataType = execplan::CalpontSystemCatalog::TEXT; break;
+    case ddlpackage::DDL_TEXT:
+    case ddlpackage::DDL_JSON: calpontDataType = execplan::CalpontSystemCatalog::TEXT; break;
 
     case ddlpackage::DDL_UNSIGNED_TINYINT: calpontDataType = execplan::CalpontSystemCatalog::UTINYINT; break;
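Viewed as a whole, the engine-side changes map the new DDL constructs onto existing machinery rather than adding a new storage type: DDL_JSON is converted to the system catalog's TEXT type everywhere DDL types are converted, and DDL_VALIDATE_JSON gets its own constraint code ('j') and name prefix ("jk_") next to the existing NOT NULL handling. A condensed sketch of those two mappings follows; the enums and helper names are simplified stand-ins for illustration, not the real ddlpackage or CalpontSystemCatalog declarations.

// Condensed sketch of the type/constraint mappings introduced by this patch.
// The real code lives in ddlpackageprocessor.cpp and we_ddlcommon.h.
#include <stdexcept>
#include <string>

enum class DdlType { TEXT, JSON, TIMESTAMP /* ... */ };
enum class CalpontType { TEXT, TIMESTAMP /* ... */ };
enum class DdlConstraint { NOT_NULL, AUTO_INCREMENT, VALIDATE_JSON /* ... */ };

// DDL_JSON is not a new storage type: it is folded onto TEXT wherever a DDL
// type is converted to a system-catalog type.
CalpontType convertDataType(DdlType t)
{
  switch (t)
  {
    case DdlType::TEXT:
    case DdlType::JSON: return CalpontType::TEXT;
    case DdlType::TIMESTAMP: return CalpontType::TIMESTAMP;
    default: throw std::runtime_error("Unsupported DDL datatype!");
  }
}

// The new constraint gets its own single-character code, alongside the
// existing NOT NULL ('n') handling.
char getConstraintCode(DdlConstraint c)
{
  switch (c)
  {
    case DdlConstraint::NOT_NULL: return 'n';
    case DdlConstraint::VALIDATE_JSON: return 'j';
    default: return '0';
  }
}

// Constraint names are built from a per-kind prefix; JSON validation uses "jk_".
std::string constraintNamePrefix(DdlConstraint c)
{
  switch (c)
  {
    case DdlConstraint::NOT_NULL: return "nk_";
    case DdlConstraint::VALIDATE_JSON: return "jk_";
    default: throw std::runtime_error("Unsupported constraint type!");
  }
}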